Statistics
| Branch: | Revision:

root / target-sparc / translate.c @ 060718c1

History | View | Annotate | Download (187.9 kB)

1
/*
2
   SPARC translation
3

4
   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5
   Copyright (C) 2003-2005 Fabrice Bellard
6

7
   This library is free software; you can redistribute it and/or
8
   modify it under the terms of the GNU Lesser General Public
9
   License as published by the Free Software Foundation; either
10
   version 2 of the License, or (at your option) any later version.
11

12
   This library is distributed in the hope that it will be useful,
13
   but WITHOUT ANY WARRANTY; without even the implied warranty of
14
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15
   Lesser General Public License for more details.
16

17
   You should have received a copy of the GNU Lesser General Public
18
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
19
 */
20

    
21
#include <stdarg.h>
22
#include <stdlib.h>
23
#include <stdio.h>
24
#include <string.h>
25
#include <inttypes.h>
26

    
27
#include "cpu.h"
28
#include "exec-all.h"
29
#include "disas.h"
30
#include "helper.h"
31
#include "tcg-op.h"
32

    
33
#define GEN_HELPER 1
34
#include "helper.h"
35

    
36
#define DEBUG_DISAS
37

    
38
#define DYNAMIC_PC  1 /* dynamic pc value */
39
#define JUMP_PC     2 /* dynamic pc value which takes only two values
40
                         according to jump_pc[T2] */
41

    
42
/* global register indexes */
43
static TCGv_ptr cpu_env, cpu_regwptr;
44
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
45
static TCGv_i32 cpu_cc_op;
46
static TCGv_i32 cpu_psr;
47
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
48
static TCGv cpu_y;
49
#ifndef CONFIG_USER_ONLY
50
static TCGv cpu_tbr;
51
#endif
52
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
53
#ifdef TARGET_SPARC64
54
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
55
static TCGv cpu_gsr;
56
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58
static TCGv_i32 cpu_softint;
59
#else
60
static TCGv cpu_wim;
61
#endif
62
/* local register indexes (only used inside old micro ops) */
63
static TCGv cpu_tmp0;
64
static TCGv_i32 cpu_tmp32;
65
static TCGv_i64 cpu_tmp64;
66
/* Floating point registers */
67
static TCGv_i32 cpu_fpr[TARGET_FPREGS];
68

    
69
static target_ulong gen_opc_npc[OPC_BUF_SIZE];
70
static target_ulong gen_opc_jump_pc[2];
71

    
72
#include "gen-icount.h"
73

    
74
typedef struct DisasContext {
75
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
76
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
77
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
78
    int is_br;
79
    int mem_idx;
80
    int fpu_enabled;
81
    int address_mask_32bit;
82
    int singlestep;
83
    uint32_t cc_op;  /* current CC operation */
84
    struct TranslationBlock *tb;
85
    sparc_def_t *def;
86
} DisasContext;
87

    
88
// This function uses non-native bit order
89
#define GET_FIELD(X, FROM, TO)                                  \
90
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
91

    
92
// This function uses the order in the manuals, i.e. bit 0 is 2^0
93
#define GET_FIELD_SP(X, FROM, TO)               \
94
    GET_FIELD(X, 31 - (TO), 31 - (FROM))
95

    
96
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
97
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
98

    
99
#ifdef TARGET_SPARC64
100
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
101
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
102
#else
103
#define DFPREG(r) (r & 0x1e)
104
#define QFPREG(r) (r & 0x1c)
105
#endif
106

    
107
#define UA2005_HTRAP_MASK 0xff
108
#define V8_TRAP_MASK 0x7f
109

    
110
static int sign_extend(int x, int len)
111
{
112
    len = 32 - len;
113
    return (x << len) >> len;
114
}
115

    
116
#define IS_IMM (insn & (1<<13))
117

    
118
/* floating point registers moves */
119
static void gen_op_load_fpr_DT0(unsigned int src)
120
{
121
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
122
                   offsetof(CPU_DoubleU, l.upper));
123
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
124
                   offsetof(CPU_DoubleU, l.lower));
125
}
126

    
127
static void gen_op_load_fpr_DT1(unsigned int src)
128
{
129
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
130
                   offsetof(CPU_DoubleU, l.upper));
131
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
132
                   offsetof(CPU_DoubleU, l.lower));
133
}
134

    
135
static void gen_op_store_DT0_fpr(unsigned int dst)
136
{
137
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
138
                   offsetof(CPU_DoubleU, l.upper));
139
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
140
                   offsetof(CPU_DoubleU, l.lower));
141
}
142

    
143
static void gen_op_load_fpr_QT0(unsigned int src)
144
{
145
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
146
                   offsetof(CPU_QuadU, l.upmost));
147
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
148
                   offsetof(CPU_QuadU, l.upper));
149
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
150
                   offsetof(CPU_QuadU, l.lower));
151
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
152
                   offsetof(CPU_QuadU, l.lowest));
153
}
154

    
155
static void gen_op_load_fpr_QT1(unsigned int src)
156
{
157
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
158
                   offsetof(CPU_QuadU, l.upmost));
159
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
160
                   offsetof(CPU_QuadU, l.upper));
161
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
162
                   offsetof(CPU_QuadU, l.lower));
163
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
164
                   offsetof(CPU_QuadU, l.lowest));
165
}
166

    
167
static void gen_op_store_QT0_fpr(unsigned int dst)
168
{
169
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
170
                   offsetof(CPU_QuadU, l.upmost));
171
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
172
                   offsetof(CPU_QuadU, l.upper));
173
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
174
                   offsetof(CPU_QuadU, l.lower));
175
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
176
                   offsetof(CPU_QuadU, l.lowest));
177
}
178

    
179
/* moves */
180
#ifdef CONFIG_USER_ONLY
181
#define supervisor(dc) 0
182
#ifdef TARGET_SPARC64
183
#define hypervisor(dc) 0
184
#endif
185
#else
186
#define supervisor(dc) (dc->mem_idx >= 1)
187
#ifdef TARGET_SPARC64
188
#define hypervisor(dc) (dc->mem_idx == 2)
189
#else
190
#endif
191
#endif
192

    
193
#ifdef TARGET_SPARC64
194
#ifndef TARGET_ABI32
195
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
196
#else
197
#define AM_CHECK(dc) (1)
198
#endif
199
#endif
200

    
201
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
202
{
203
#ifdef TARGET_SPARC64
204
    if (AM_CHECK(dc))
205
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
206
#endif
207
}
208

    
209
static inline void gen_movl_reg_TN(int reg, TCGv tn)
210
{
211
    if (reg == 0)
212
        tcg_gen_movi_tl(tn, 0);
213
    else if (reg < 8)
214
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
215
    else {
216
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
217
    }
218
}
219

    
220
static inline void gen_movl_TN_reg(int reg, TCGv tn)
221
{
222
    if (reg == 0)
223
        return;
224
    else if (reg < 8)
225
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
226
    else {
227
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
228
    }
229
}
230

    
231
static inline void gen_goto_tb(DisasContext *s, int tb_num,
232
                               target_ulong pc, target_ulong npc)
233
{
234
    TranslationBlock *tb;
235

    
236
    tb = s->tb;
237
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
238
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
239
        !s->singlestep)  {
240
        /* jump to same page: we can use a direct jump */
241
        tcg_gen_goto_tb(tb_num);
242
        tcg_gen_movi_tl(cpu_pc, pc);
243
        tcg_gen_movi_tl(cpu_npc, npc);
244
        tcg_gen_exit_tb((long)tb + tb_num);
245
    } else {
246
        /* jump to another page: currently not optimized */
247
        tcg_gen_movi_tl(cpu_pc, pc);
248
        tcg_gen_movi_tl(cpu_npc, npc);
249
        tcg_gen_exit_tb(0);
250
    }
251
}
252

    
253
// XXX suboptimal
254
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
255
{
256
    tcg_gen_extu_i32_tl(reg, src);
257
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
258
    tcg_gen_andi_tl(reg, reg, 0x1);
259
}
260

    
261
static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
262
{
263
    tcg_gen_extu_i32_tl(reg, src);
264
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
265
    tcg_gen_andi_tl(reg, reg, 0x1);
266
}
267

    
268
static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
269
{
270
    tcg_gen_extu_i32_tl(reg, src);
271
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
272
    tcg_gen_andi_tl(reg, reg, 0x1);
273
}
274

    
275
static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
276
{
277
    tcg_gen_extu_i32_tl(reg, src);
278
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
279
    tcg_gen_andi_tl(reg, reg, 0x1);
280
}
281

    
282
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
283
{
284
    TCGv r_temp;
285
    TCGv_i32 r_const;
286
    int l1;
287

    
288
    l1 = gen_new_label();
289

    
290
    r_temp = tcg_temp_new();
291
    tcg_gen_xor_tl(r_temp, src1, src2);
292
    tcg_gen_not_tl(r_temp, r_temp);
293
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
294
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
295
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
296
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
297
    r_const = tcg_const_i32(TT_TOVF);
298
    gen_helper_raise_exception(r_const);
299
    tcg_temp_free_i32(r_const);
300
    gen_set_label(l1);
301
    tcg_temp_free(r_temp);
302
}
303

    
304
static inline void gen_tag_tv(TCGv src1, TCGv src2)
305
{
306
    int l1;
307
    TCGv_i32 r_const;
308

    
309
    l1 = gen_new_label();
310
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
311
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
312
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
313
    r_const = tcg_const_i32(TT_TOVF);
314
    gen_helper_raise_exception(r_const);
315
    tcg_temp_free_i32(r_const);
316
    gen_set_label(l1);
317
}
318

    
319
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
320
{
321
    tcg_gen_mov_tl(cpu_cc_src, src1);
322
    tcg_gen_movi_tl(cpu_cc_src2, src2);
323
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
324
    tcg_gen_mov_tl(dst, cpu_cc_dst);
325
}
326

    
327
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
328
{
329
    tcg_gen_mov_tl(cpu_cc_src, src1);
330
    tcg_gen_mov_tl(cpu_cc_src2, src2);
331
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
332
    tcg_gen_mov_tl(dst, cpu_cc_dst);
333
}
334

    
335
static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
336
{
337
    tcg_gen_mov_tl(cpu_cc_src, src1);
338
    tcg_gen_movi_tl(cpu_cc_src2, src2);
339
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
340
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
341
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
342
    tcg_gen_mov_tl(dst, cpu_cc_dst);
343
}
344

    
345
static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
346
{
347
    tcg_gen_mov_tl(cpu_cc_src, src1);
348
    tcg_gen_mov_tl(cpu_cc_src2, src2);
349
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
350
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
351
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
352
    tcg_gen_mov_tl(dst, cpu_cc_dst);
353
}
354

    
355
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
356
{
357
    tcg_gen_mov_tl(cpu_cc_src, src1);
358
    tcg_gen_mov_tl(cpu_cc_src2, src2);
359
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
360
    tcg_gen_mov_tl(dst, cpu_cc_dst);
361
}
362

    
363
static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
364
{
365
    tcg_gen_mov_tl(cpu_cc_src, src1);
366
    tcg_gen_mov_tl(cpu_cc_src2, src2);
367
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
368
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
369
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
370
    tcg_gen_mov_tl(dst, cpu_cc_dst);
371
}
372

    
373
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
374
{
375
    TCGv r_temp;
376
    TCGv_i32 r_const;
377
    int l1;
378

    
379
    l1 = gen_new_label();
380

    
381
    r_temp = tcg_temp_new();
382
    tcg_gen_xor_tl(r_temp, src1, src2);
383
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
384
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
385
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
386
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
387
    r_const = tcg_const_i32(TT_TOVF);
388
    gen_helper_raise_exception(r_const);
389
    tcg_temp_free_i32(r_const);
390
    gen_set_label(l1);
391
    tcg_temp_free(r_temp);
392
}
393

    
394
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
395
{
396
    tcg_gen_mov_tl(cpu_cc_src, src1);
397
    tcg_gen_movi_tl(cpu_cc_src2, src2);
398
    if (src2 == 0) {
399
        tcg_gen_mov_tl(cpu_cc_dst, src1);
400
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
401
        dc->cc_op = CC_OP_LOGIC;
402
    } else {
403
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
404
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
405
        dc->cc_op = CC_OP_SUB;
406
    }
407
    tcg_gen_mov_tl(dst, cpu_cc_dst);
408
}
409

    
410
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
411
{
412
    tcg_gen_mov_tl(cpu_cc_src, src1);
413
    tcg_gen_mov_tl(cpu_cc_src2, src2);
414
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
415
    tcg_gen_mov_tl(dst, cpu_cc_dst);
416
}
417

    
418
static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
419
{
420
    tcg_gen_mov_tl(cpu_cc_src, src1);
421
    tcg_gen_movi_tl(cpu_cc_src2, src2);
422
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
423
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
424
    tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
425
    tcg_gen_mov_tl(dst, cpu_cc_dst);
426
}
427

    
428
static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
429
{
430
    tcg_gen_mov_tl(cpu_cc_src, src1);
431
    tcg_gen_mov_tl(cpu_cc_src2, src2);
432
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
433
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
434
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
435
    tcg_gen_mov_tl(dst, cpu_cc_dst);
436
}
437

    
438
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
439
{
440
    tcg_gen_mov_tl(cpu_cc_src, src1);
441
    tcg_gen_mov_tl(cpu_cc_src2, src2);
442
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
443
    tcg_gen_mov_tl(dst, cpu_cc_dst);
444
}
445

    
446
static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
447
{
448
    tcg_gen_mov_tl(cpu_cc_src, src1);
449
    tcg_gen_mov_tl(cpu_cc_src2, src2);
450
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
451
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
452
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
453
    tcg_gen_mov_tl(dst, cpu_cc_dst);
454
}
455

    
456
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
457
{
458
    TCGv r_temp;
459
    int l1;
460

    
461
    l1 = gen_new_label();
462
    r_temp = tcg_temp_new();
463

    
464
    /* old op:
465
    if (!(env->y & 1))
466
        T1 = 0;
467
    */
468
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
469
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
470
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
471
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
472
    tcg_gen_movi_tl(cpu_cc_src2, 0);
473
    gen_set_label(l1);
474

    
475
    // b2 = T0 & 1;
476
    // env->y = (b2 << 31) | (env->y >> 1);
477
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
478
    tcg_gen_shli_tl(r_temp, r_temp, 31);
479
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
480
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
481
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
482
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
483

    
484
    // b1 = N ^ V;
485
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
486
    gen_mov_reg_V(r_temp, cpu_psr);
487
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
488
    tcg_temp_free(r_temp);
489

    
490
    // T0 = (b1 << 31) | (T0 >> 1);
491
    // src1 = T0;
492
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
493
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
494
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
495

    
496
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
497

    
498
    tcg_gen_mov_tl(dst, cpu_cc_dst);
499
}
500

    
501
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
502
{
503
    TCGv_i64 r_temp, r_temp2;
504

    
505
    r_temp = tcg_temp_new_i64();
506
    r_temp2 = tcg_temp_new_i64();
507

    
508
    tcg_gen_extu_tl_i64(r_temp, src2);
509
    tcg_gen_extu_tl_i64(r_temp2, src1);
510
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
511

    
512
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
513
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
514
    tcg_temp_free_i64(r_temp);
515
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
516
#ifdef TARGET_SPARC64
517
    tcg_gen_mov_i64(dst, r_temp2);
518
#else
519
    tcg_gen_trunc_i64_tl(dst, r_temp2);
520
#endif
521
    tcg_temp_free_i64(r_temp2);
522
}
523

    
524
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
525
{
526
    TCGv_i64 r_temp, r_temp2;
527

    
528
    r_temp = tcg_temp_new_i64();
529
    r_temp2 = tcg_temp_new_i64();
530

    
531
    tcg_gen_ext_tl_i64(r_temp, src2);
532
    tcg_gen_ext_tl_i64(r_temp2, src1);
533
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
534

    
535
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
536
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
537
    tcg_temp_free_i64(r_temp);
538
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
539
#ifdef TARGET_SPARC64
540
    tcg_gen_mov_i64(dst, r_temp2);
541
#else
542
    tcg_gen_trunc_i64_tl(dst, r_temp2);
543
#endif
544
    tcg_temp_free_i64(r_temp2);
545
}
546

    
547
#ifdef TARGET_SPARC64
548
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
549
{
550
    TCGv_i32 r_const;
551
    int l1;
552

    
553
    l1 = gen_new_label();
554
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
555
    r_const = tcg_const_i32(TT_DIV_ZERO);
556
    gen_helper_raise_exception(r_const);
557
    tcg_temp_free_i32(r_const);
558
    gen_set_label(l1);
559
}
560

    
561
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
562
{
563
    int l1, l2;
564

    
565
    l1 = gen_new_label();
566
    l2 = gen_new_label();
567
    tcg_gen_mov_tl(cpu_cc_src, src1);
568
    tcg_gen_mov_tl(cpu_cc_src2, src2);
569
    gen_trap_ifdivzero_tl(cpu_cc_src2);
570
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
571
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
572
    tcg_gen_movi_i64(dst, INT64_MIN);
573
    tcg_gen_br(l2);
574
    gen_set_label(l1);
575
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
576
    gen_set_label(l2);
577
}
578
#endif
579

    
580
// 1
581
static inline void gen_op_eval_ba(TCGv dst)
582
{
583
    tcg_gen_movi_tl(dst, 1);
584
}
585

    
586
// Z
587
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
588
{
589
    gen_mov_reg_Z(dst, src);
590
}
591

    
592
// Z | (N ^ V)
593
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
594
{
595
    gen_mov_reg_N(cpu_tmp0, src);
596
    gen_mov_reg_V(dst, src);
597
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
598
    gen_mov_reg_Z(cpu_tmp0, src);
599
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
600
}
601

    
602
// N ^ V
603
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
604
{
605
    gen_mov_reg_V(cpu_tmp0, src);
606
    gen_mov_reg_N(dst, src);
607
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
608
}
609

    
610
// C | Z
611
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
612
{
613
    gen_mov_reg_Z(cpu_tmp0, src);
614
    gen_mov_reg_C(dst, src);
615
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
616
}
617

    
618
// C
619
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
620
{
621
    gen_mov_reg_C(dst, src);
622
}
623

    
624
// V
625
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
626
{
627
    gen_mov_reg_V(dst, src);
628
}
629

    
630
// 0
631
static inline void gen_op_eval_bn(TCGv dst)
632
{
633
    tcg_gen_movi_tl(dst, 0);
634
}
635

    
636
// N
637
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
638
{
639
    gen_mov_reg_N(dst, src);
640
}
641

    
642
// !Z
643
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
644
{
645
    gen_mov_reg_Z(dst, src);
646
    tcg_gen_xori_tl(dst, dst, 0x1);
647
}
648

    
649
// !(Z | (N ^ V))
650
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
651
{
652
    gen_mov_reg_N(cpu_tmp0, src);
653
    gen_mov_reg_V(dst, src);
654
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
655
    gen_mov_reg_Z(cpu_tmp0, src);
656
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
657
    tcg_gen_xori_tl(dst, dst, 0x1);
658
}
659

    
660
// !(N ^ V)
661
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
662
{
663
    gen_mov_reg_V(cpu_tmp0, src);
664
    gen_mov_reg_N(dst, src);
665
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
666
    tcg_gen_xori_tl(dst, dst, 0x1);
667
}
668

    
669
// !(C | Z)
670
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
671
{
672
    gen_mov_reg_Z(cpu_tmp0, src);
673
    gen_mov_reg_C(dst, src);
674
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
675
    tcg_gen_xori_tl(dst, dst, 0x1);
676
}
677

    
678
// !C
679
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
680
{
681
    gen_mov_reg_C(dst, src);
682
    tcg_gen_xori_tl(dst, dst, 0x1);
683
}
684

    
685
// !N
686
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
687
{
688
    gen_mov_reg_N(dst, src);
689
    tcg_gen_xori_tl(dst, dst, 0x1);
690
}
691

    
692
// !V
693
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
694
{
695
    gen_mov_reg_V(dst, src);
696
    tcg_gen_xori_tl(dst, dst, 0x1);
697
}
698

    
699
/*
700
  FPSR bit field FCC1 | FCC0:
701
   0 =
702
   1 <
703
   2 >
704
   3 unordered
705
*/
706
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
707
                                    unsigned int fcc_offset)
708
{
709
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
710
    tcg_gen_andi_tl(reg, reg, 0x1);
711
}
712

    
713
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
714
                                    unsigned int fcc_offset)
715
{
716
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
717
    tcg_gen_andi_tl(reg, reg, 0x1);
718
}
719

    
720
// !0: FCC0 | FCC1
721
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
722
                                    unsigned int fcc_offset)
723
{
724
    gen_mov_reg_FCC0(dst, src, fcc_offset);
725
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
726
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
727
}
728

    
729
// 1 or 2: FCC0 ^ FCC1
730
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
731
                                    unsigned int fcc_offset)
732
{
733
    gen_mov_reg_FCC0(dst, src, fcc_offset);
734
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
735
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
736
}
737

    
738
// 1 or 3: FCC0
739
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
740
                                    unsigned int fcc_offset)
741
{
742
    gen_mov_reg_FCC0(dst, src, fcc_offset);
743
}
744

    
745
// 1: FCC0 & !FCC1
746
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
747
                                    unsigned int fcc_offset)
748
{
749
    gen_mov_reg_FCC0(dst, src, fcc_offset);
750
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
751
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
752
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
753
}
754

    
755
// 2 or 3: FCC1
756
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
757
                                    unsigned int fcc_offset)
758
{
759
    gen_mov_reg_FCC1(dst, src, fcc_offset);
760
}
761

    
762
// 2: !FCC0 & FCC1
763
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
764
                                    unsigned int fcc_offset)
765
{
766
    gen_mov_reg_FCC0(dst, src, fcc_offset);
767
    tcg_gen_xori_tl(dst, dst, 0x1);
768
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
769
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
770
}
771

    
772
// 3: FCC0 & FCC1
773
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
774
                                    unsigned int fcc_offset)
775
{
776
    gen_mov_reg_FCC0(dst, src, fcc_offset);
777
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
778
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
779
}
780

    
781
// 0: !(FCC0 | FCC1)
782
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
783
                                    unsigned int fcc_offset)
784
{
785
    gen_mov_reg_FCC0(dst, src, fcc_offset);
786
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
787
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
788
    tcg_gen_xori_tl(dst, dst, 0x1);
789
}
790

    
791
// 0 or 3: !(FCC0 ^ FCC1)
792
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
793
                                    unsigned int fcc_offset)
794
{
795
    gen_mov_reg_FCC0(dst, src, fcc_offset);
796
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
797
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
798
    tcg_gen_xori_tl(dst, dst, 0x1);
799
}
800

    
801
// 0 or 2: !FCC0
802
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
803
                                    unsigned int fcc_offset)
804
{
805
    gen_mov_reg_FCC0(dst, src, fcc_offset);
806
    tcg_gen_xori_tl(dst, dst, 0x1);
807
}
808

    
809
// !1: !(FCC0 & !FCC1)
810
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
811
                                    unsigned int fcc_offset)
812
{
813
    gen_mov_reg_FCC0(dst, src, fcc_offset);
814
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
815
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
816
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
817
    tcg_gen_xori_tl(dst, dst, 0x1);
818
}
819

    
820
// 0 or 1: !FCC1
821
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
822
                                    unsigned int fcc_offset)
823
{
824
    gen_mov_reg_FCC1(dst, src, fcc_offset);
825
    tcg_gen_xori_tl(dst, dst, 0x1);
826
}
827

    
828
// !2: !(!FCC0 & FCC1)
829
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
830
                                    unsigned int fcc_offset)
831
{
832
    gen_mov_reg_FCC0(dst, src, fcc_offset);
833
    tcg_gen_xori_tl(dst, dst, 0x1);
834
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
835
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
836
    tcg_gen_xori_tl(dst, dst, 0x1);
837
}
838

    
839
// !3: !(FCC0 & FCC1)
840
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
841
                                    unsigned int fcc_offset)
842
{
843
    gen_mov_reg_FCC0(dst, src, fcc_offset);
844
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
845
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
846
    tcg_gen_xori_tl(dst, dst, 0x1);
847
}
848

    
849
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
850
                               target_ulong pc2, TCGv r_cond)
851
{
852
    int l1;
853

    
854
    l1 = gen_new_label();
855

    
856
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
857

    
858
    gen_goto_tb(dc, 0, pc1, pc1 + 4);
859

    
860
    gen_set_label(l1);
861
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
862
}
863

    
864
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
865
                                target_ulong pc2, TCGv r_cond)
866
{
867
    int l1;
868

    
869
    l1 = gen_new_label();
870

    
871
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
872

    
873
    gen_goto_tb(dc, 0, pc2, pc1);
874

    
875
    gen_set_label(l1);
876
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
877
}
878

    
879
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
880
                                      TCGv r_cond)
881
{
882
    int l1, l2;
883

    
884
    l1 = gen_new_label();
885
    l2 = gen_new_label();
886

    
887
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
888

    
889
    tcg_gen_movi_tl(cpu_npc, npc1);
890
    tcg_gen_br(l2);
891

    
892
    gen_set_label(l1);
893
    tcg_gen_movi_tl(cpu_npc, npc2);
894
    gen_set_label(l2);
895
}
896

    
897
/* call this function before using the condition register as it may
898
   have been set for a jump */
899
static inline void flush_cond(DisasContext *dc, TCGv cond)
900
{
901
    if (dc->npc == JUMP_PC) {
902
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
903
        dc->npc = DYNAMIC_PC;
904
    }
905
}
906

    
907
static inline void save_npc(DisasContext *dc, TCGv cond)
908
{
909
    if (dc->npc == JUMP_PC) {
910
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
911
        dc->npc = DYNAMIC_PC;
912
    } else if (dc->npc != DYNAMIC_PC) {
913
        tcg_gen_movi_tl(cpu_npc, dc->npc);
914
    }
915
}
916

    
917
static inline void save_state(DisasContext *dc, TCGv cond)
918
{
919
    tcg_gen_movi_tl(cpu_pc, dc->pc);
920
    /* flush pending conditional evaluations before exposing cpu state */
921
    if (dc->cc_op != CC_OP_FLAGS) {
922
        dc->cc_op = CC_OP_FLAGS;
923
        gen_helper_compute_psr();
924
    }
925
    save_npc(dc, cond);
926
}
927

    
928
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
929
{
930
    if (dc->npc == JUMP_PC) {
931
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
932
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
933
        dc->pc = DYNAMIC_PC;
934
    } else if (dc->npc == DYNAMIC_PC) {
935
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
936
        dc->pc = DYNAMIC_PC;
937
    } else {
938
        dc->pc = dc->npc;
939
    }
940
}
941

    
942
static inline void gen_op_next_insn(void)
943
{
944
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
945
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
946
}
947

    
948
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
949
                            DisasContext *dc)
950
{
951
    TCGv_i32 r_src;
952

    
953
#ifdef TARGET_SPARC64
954
    if (cc)
955
        r_src = cpu_xcc;
956
    else
957
        r_src = cpu_psr;
958
#else
959
    r_src = cpu_psr;
960
#endif
961
    switch (dc->cc_op) {
962
    case CC_OP_FLAGS:
963
        break;
964
    default:
965
        gen_helper_compute_psr();
966
        dc->cc_op = CC_OP_FLAGS;
967
        break;
968
    }
969
    switch (cond) {
970
    case 0x0:
971
        gen_op_eval_bn(r_dst);
972
        break;
973
    case 0x1:
974
        gen_op_eval_be(r_dst, r_src);
975
        break;
976
    case 0x2:
977
        gen_op_eval_ble(r_dst, r_src);
978
        break;
979
    case 0x3:
980
        gen_op_eval_bl(r_dst, r_src);
981
        break;
982
    case 0x4:
983
        gen_op_eval_bleu(r_dst, r_src);
984
        break;
985
    case 0x5:
986
        gen_op_eval_bcs(r_dst, r_src);
987
        break;
988
    case 0x6:
989
        gen_op_eval_bneg(r_dst, r_src);
990
        break;
991
    case 0x7:
992
        gen_op_eval_bvs(r_dst, r_src);
993
        break;
994
    case 0x8:
995
        gen_op_eval_ba(r_dst);
996
        break;
997
    case 0x9:
998
        gen_op_eval_bne(r_dst, r_src);
999
        break;
1000
    case 0xa:
1001
        gen_op_eval_bg(r_dst, r_src);
1002
        break;
1003
    case 0xb:
1004
        gen_op_eval_bge(r_dst, r_src);
1005
        break;
1006
    case 0xc:
1007
        gen_op_eval_bgu(r_dst, r_src);
1008
        break;
1009
    case 0xd:
1010
        gen_op_eval_bcc(r_dst, r_src);
1011
        break;
1012
    case 0xe:
1013
        gen_op_eval_bpos(r_dst, r_src);
1014
        break;
1015
    case 0xf:
1016
        gen_op_eval_bvc(r_dst, r_src);
1017
        break;
1018
    }
1019
}
1020

    
1021
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1022
{
1023
    unsigned int offset;
1024

    
1025
    switch (cc) {
1026
    default:
1027
    case 0x0:
1028
        offset = 0;
1029
        break;
1030
    case 0x1:
1031
        offset = 32 - 10;
1032
        break;
1033
    case 0x2:
1034
        offset = 34 - 10;
1035
        break;
1036
    case 0x3:
1037
        offset = 36 - 10;
1038
        break;
1039
    }
1040

    
1041
    switch (cond) {
1042
    case 0x0:
1043
        gen_op_eval_bn(r_dst);
1044
        break;
1045
    case 0x1:
1046
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1047
        break;
1048
    case 0x2:
1049
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1050
        break;
1051
    case 0x3:
1052
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1053
        break;
1054
    case 0x4:
1055
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1056
        break;
1057
    case 0x5:
1058
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1059
        break;
1060
    case 0x6:
1061
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1062
        break;
1063
    case 0x7:
1064
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1065
        break;
1066
    case 0x8:
1067
        gen_op_eval_ba(r_dst);
1068
        break;
1069
    case 0x9:
1070
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1071
        break;
1072
    case 0xa:
1073
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1074
        break;
1075
    case 0xb:
1076
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1077
        break;
1078
    case 0xc:
1079
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1080
        break;
1081
    case 0xd:
1082
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
1083
        break;
1084
    case 0xe:
1085
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1086
        break;
1087
    case 0xf:
1088
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1089
        break;
1090
    }
1091
}
1092

    
1093
#ifdef TARGET_SPARC64
1094
// Inverted logic
1095
static const int gen_tcg_cond_reg[8] = {
1096
    -1,
1097
    TCG_COND_NE,
1098
    TCG_COND_GT,
1099
    TCG_COND_GE,
1100
    -1,
1101
    TCG_COND_EQ,
1102
    TCG_COND_LE,
1103
    TCG_COND_LT,
1104
};
1105

    
1106
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1107
{
1108
    int l1;
1109

    
1110
    l1 = gen_new_label();
1111
    tcg_gen_movi_tl(r_dst, 0);
1112
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1113
    tcg_gen_movi_tl(r_dst, 1);
1114
    gen_set_label(l1);
1115
}
1116
#endif
1117

    
1118
/* XXX: potentially incorrect if dynamic npc */
1119
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1120
                      TCGv r_cond)
1121
{
1122
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1123
    target_ulong target = dc->pc + offset;
1124

    
1125
    if (cond == 0x0) {
1126
        /* unconditional not taken */
1127
        if (a) {
1128
            dc->pc = dc->npc + 4;
1129
            dc->npc = dc->pc + 4;
1130
        } else {
1131
            dc->pc = dc->npc;
1132
            dc->npc = dc->pc + 4;
1133
        }
1134
    } else if (cond == 0x8) {
1135
        /* unconditional taken */
1136
        if (a) {
1137
            dc->pc = target;
1138
            dc->npc = dc->pc + 4;
1139
        } else {
1140
            dc->pc = dc->npc;
1141
            dc->npc = target;
1142
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
1143
        }
1144
    } else {
1145
        flush_cond(dc, r_cond);
1146
        gen_cond(r_cond, cc, cond, dc);
1147
        if (a) {
1148
            gen_branch_a(dc, target, dc->npc, r_cond);
1149
            dc->is_br = 1;
1150
        } else {
1151
            dc->pc = dc->npc;
1152
            dc->jump_pc[0] = target;
1153
            dc->jump_pc[1] = dc->npc + 4;
1154
            dc->npc = JUMP_PC;
1155
        }
1156
    }
1157
}
1158

    
1159
/* XXX: potentially incorrect if dynamic npc */
1160
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1161
                      TCGv r_cond)
1162
{
1163
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1164
    target_ulong target = dc->pc + offset;
1165

    
1166
    if (cond == 0x0) {
1167
        /* unconditional not taken */
1168
        if (a) {
1169
            dc->pc = dc->npc + 4;
1170
            dc->npc = dc->pc + 4;
1171
        } else {
1172
            dc->pc = dc->npc;
1173
            dc->npc = dc->pc + 4;
1174
        }
1175
    } else if (cond == 0x8) {
1176
        /* unconditional taken */
1177
        if (a) {
1178
            dc->pc = target;
1179
            dc->npc = dc->pc + 4;
1180
        } else {
1181
            dc->pc = dc->npc;
1182
            dc->npc = target;
1183
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
1184
        }
1185
    } else {
1186
        flush_cond(dc, r_cond);
1187
        gen_fcond(r_cond, cc, cond);
1188
        if (a) {
1189
            gen_branch_a(dc, target, dc->npc, r_cond);
1190
            dc->is_br = 1;
1191
        } else {
1192
            dc->pc = dc->npc;
1193
            dc->jump_pc[0] = target;
1194
            dc->jump_pc[1] = dc->npc + 4;
1195
            dc->npc = JUMP_PC;
1196
        }
1197
    }
1198
}
1199

    
1200
#ifdef TARGET_SPARC64
1201
/* XXX: potentially incorrect if dynamic npc */
1202
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1203
                          TCGv r_cond, TCGv r_reg)
1204
{
1205
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1206
    target_ulong target = dc->pc + offset;
1207

    
1208
    flush_cond(dc, r_cond);
1209
    gen_cond_reg(r_cond, cond, r_reg);
1210
    if (a) {
1211
        gen_branch_a(dc, target, dc->npc, r_cond);
1212
        dc->is_br = 1;
1213
    } else {
1214
        dc->pc = dc->npc;
1215
        dc->jump_pc[0] = target;
1216
        dc->jump_pc[1] = dc->npc + 4;
1217
        dc->npc = JUMP_PC;
1218
    }
1219
}
1220

    
1221
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1222
{
1223
    switch (fccno) {
1224
    case 0:
1225
        gen_helper_fcmps(r_rs1, r_rs2);
1226
        break;
1227
    case 1:
1228
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1229
        break;
1230
    case 2:
1231
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1232
        break;
1233
    case 3:
1234
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);
1235
        break;
1236
    }
1237
}
1238

    
1239
static inline void gen_op_fcmpd(int fccno)
1240
{
1241
    switch (fccno) {
1242
    case 0:
1243
        gen_helper_fcmpd();
1244
        break;
1245
    case 1:
1246
        gen_helper_fcmpd_fcc1();
1247
        break;
1248
    case 2:
1249
        gen_helper_fcmpd_fcc2();
1250
        break;
1251
    case 3:
1252
        gen_helper_fcmpd_fcc3();
1253
        break;
1254
    }
1255
}
1256

    
1257
static inline void gen_op_fcmpq(int fccno)
1258
{
1259
    switch (fccno) {
1260
    case 0:
1261
        gen_helper_fcmpq();
1262
        break;
1263
    case 1:
1264
        gen_helper_fcmpq_fcc1();
1265
        break;
1266
    case 2:
1267
        gen_helper_fcmpq_fcc2();
1268
        break;
1269
    case 3:
1270
        gen_helper_fcmpq_fcc3();
1271
        break;
1272
    }
1273
}
1274

    
1275
static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1276
{
1277
    switch (fccno) {
1278
    case 0:
1279
        gen_helper_fcmpes(r_rs1, r_rs2);
1280
        break;
1281
    case 1:
1282
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1283
        break;
1284
    case 2:
1285
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1286
        break;
1287
    case 3:
1288
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
1289
        break;
1290
    }
1291
}
1292

    
1293
static inline void gen_op_fcmped(int fccno)
1294
{
1295
    switch (fccno) {
1296
    case 0:
1297
        gen_helper_fcmped();
1298
        break;
1299
    case 1:
1300
        gen_helper_fcmped_fcc1();
1301
        break;
1302
    case 2:
1303
        gen_helper_fcmped_fcc2();
1304
        break;
1305
    case 3:
1306
        gen_helper_fcmped_fcc3();
1307
        break;
1308
    }
1309
}
1310

    
1311
static inline void gen_op_fcmpeq(int fccno)
1312
{
1313
    switch (fccno) {
1314
    case 0:
1315
        gen_helper_fcmpeq();
1316
        break;
1317
    case 1:
1318
        gen_helper_fcmpeq_fcc1();
1319
        break;
1320
    case 2:
1321
        gen_helper_fcmpeq_fcc2();
1322
        break;
1323
    case 3:
1324
        gen_helper_fcmpeq_fcc3();
1325
        break;
1326
    }
1327
}
1328

    
1329
#else
1330

    
1331
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1332
{
1333
    gen_helper_fcmps(r_rs1, r_rs2);
1334
}
1335

    
1336
static inline void gen_op_fcmpd(int fccno)
1337
{
1338
    gen_helper_fcmpd();
1339
}
1340

    
1341
static inline void gen_op_fcmpq(int fccno)
1342
{
1343
    gen_helper_fcmpq();
1344
}
1345

    
1346
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1347
{
1348
    gen_helper_fcmpes(r_rs1, r_rs2);
1349
}
1350

    
1351
static inline void gen_op_fcmped(int fccno)
1352
{
1353
    gen_helper_fcmped();
1354
}
1355

    
1356
static inline void gen_op_fcmpeq(int fccno)
1357
{
1358
    gen_helper_fcmpeq();
1359
}
1360
#endif
1361

    
1362
static inline void gen_op_fpexception_im(int fsr_flags)
1363
{
1364
    TCGv_i32 r_const;
1365

    
1366
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1367
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1368
    r_const = tcg_const_i32(TT_FP_EXCP);
1369
    gen_helper_raise_exception(r_const);
1370
    tcg_temp_free_i32(r_const);
1371
}
1372

    
1373
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1374
{
1375
#if !defined(CONFIG_USER_ONLY)
1376
    if (!dc->fpu_enabled) {
1377
        TCGv_i32 r_const;
1378

    
1379
        save_state(dc, r_cond);
1380
        r_const = tcg_const_i32(TT_NFPU_INSN);
1381
        gen_helper_raise_exception(r_const);
1382
        tcg_temp_free_i32(r_const);
1383
        dc->is_br = 1;
1384
        return 1;
1385
    }
1386
#endif
1387
    return 0;
1388
}
1389

    
1390
static inline void gen_op_clear_ieee_excp_and_FTT(void)
1391
{
1392
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1393
}
1394

    
1395
static inline void gen_clear_float_exceptions(void)
1396
{
1397
    gen_helper_clear_float_exceptions();
1398
}
1399

    
1400
/* asi moves */
1401
#ifdef TARGET_SPARC64
1402
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1403
{
1404
    int asi;
1405
    TCGv_i32 r_asi;
1406

    
1407
    if (IS_IMM) {
1408
        r_asi = tcg_temp_new_i32();
1409
        tcg_gen_mov_i32(r_asi, cpu_asi);
1410
    } else {
1411
        asi = GET_FIELD(insn, 19, 26);
1412
        r_asi = tcg_const_i32(asi);
1413
    }
1414
    return r_asi;
1415
}
1416

    
1417
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1418
                              int sign)
1419
{
1420
    TCGv_i32 r_asi, r_size, r_sign;
1421

    
1422
    r_asi = gen_get_asi(insn, addr);
1423
    r_size = tcg_const_i32(size);
1424
    r_sign = tcg_const_i32(sign);
1425
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1426
    tcg_temp_free_i32(r_sign);
1427
    tcg_temp_free_i32(r_size);
1428
    tcg_temp_free_i32(r_asi);
1429
}
1430

    
1431
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1432
{
1433
    TCGv_i32 r_asi, r_size;
1434

    
1435
    r_asi = gen_get_asi(insn, addr);
1436
    r_size = tcg_const_i32(size);
1437
    gen_helper_st_asi(addr, src, r_asi, r_size);
1438
    tcg_temp_free_i32(r_size);
1439
    tcg_temp_free_i32(r_asi);
1440
}
1441

    
1442
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1443
{
1444
    TCGv_i32 r_asi, r_size, r_rd;
1445

    
1446
    r_asi = gen_get_asi(insn, addr);
1447
    r_size = tcg_const_i32(size);
1448
    r_rd = tcg_const_i32(rd);
1449
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1450
    tcg_temp_free_i32(r_rd);
1451
    tcg_temp_free_i32(r_size);
1452
    tcg_temp_free_i32(r_asi);
1453
}
1454

    
1455
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1456
{
1457
    TCGv_i32 r_asi, r_size, r_rd;
1458

    
1459
    r_asi = gen_get_asi(insn, addr);
1460
    r_size = tcg_const_i32(size);
1461
    r_rd = tcg_const_i32(rd);
1462
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1463
    tcg_temp_free_i32(r_rd);
1464
    tcg_temp_free_i32(r_size);
1465
    tcg_temp_free_i32(r_asi);
1466
}
1467

    
1468
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1469
{
1470
    TCGv_i32 r_asi, r_size, r_sign;
1471

    
1472
    r_asi = gen_get_asi(insn, addr);
1473
    r_size = tcg_const_i32(4);
1474
    r_sign = tcg_const_i32(0);
1475
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1476
    tcg_temp_free_i32(r_sign);
1477
    gen_helper_st_asi(addr, dst, r_asi, r_size);
1478
    tcg_temp_free_i32(r_size);
1479
    tcg_temp_free_i32(r_asi);
1480
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1481
}
1482

    
1483
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1484
{
1485
    TCGv_i32 r_asi, r_rd;
1486

    
1487
    r_asi = gen_get_asi(insn, addr);
1488
    r_rd = tcg_const_i32(rd);
1489
    gen_helper_ldda_asi(addr, r_asi, r_rd);
1490
    tcg_temp_free_i32(r_rd);
1491
    tcg_temp_free_i32(r_asi);
1492
}
1493

    
1494
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1495
{
1496
    TCGv_i32 r_asi, r_size;
1497

    
1498
    gen_movl_reg_TN(rd + 1, cpu_tmp0);
1499
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1500
    r_asi = gen_get_asi(insn, addr);
1501
    r_size = tcg_const_i32(8);
1502
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1503
    tcg_temp_free_i32(r_size);
1504
    tcg_temp_free_i32(r_asi);
1505
}
1506

    
1507
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1508
                               int rd)
1509
{
1510
    TCGv r_val1;
1511
    TCGv_i32 r_asi;
1512

    
1513
    r_val1 = tcg_temp_new();
1514
    gen_movl_reg_TN(rd, r_val1);
1515
    r_asi = gen_get_asi(insn, addr);
1516
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1517
    tcg_temp_free_i32(r_asi);
1518
    tcg_temp_free(r_val1);
1519
}
1520

    
1521
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1522
                                int rd)
1523
{
1524
    TCGv_i32 r_asi;
1525

    
1526
    gen_movl_reg_TN(rd, cpu_tmp64);
1527
    r_asi = gen_get_asi(insn, addr);
1528
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1529
    tcg_temp_free_i32(r_asi);
1530
}
1531

    
1532
#elif !defined(CONFIG_USER_ONLY)
1533

    
1534
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1535
                              int sign)
1536
{
1537
    TCGv_i32 r_asi, r_size, r_sign;
1538

    
1539
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1540
    r_size = tcg_const_i32(size);
1541
    r_sign = tcg_const_i32(sign);
1542
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1543
    tcg_temp_free(r_sign);
1544
    tcg_temp_free(r_size);
1545
    tcg_temp_free(r_asi);
1546
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1547
}
1548

    
1549
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1550
{
1551
    TCGv_i32 r_asi, r_size;
1552

    
1553
    tcg_gen_extu_tl_i64(cpu_tmp64, src);
1554
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1555
    r_size = tcg_const_i32(size);
1556
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1557
    tcg_temp_free(r_size);
1558
    tcg_temp_free(r_asi);
1559
}
1560

    
1561
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1562
{
1563
    TCGv_i32 r_asi, r_size, r_sign;
1564
    TCGv_i64 r_val;
1565

    
1566
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1567
    r_size = tcg_const_i32(4);
1568
    r_sign = tcg_const_i32(0);
1569
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1570
    tcg_temp_free(r_sign);
1571
    r_val = tcg_temp_new_i64();
1572
    tcg_gen_extu_tl_i64(r_val, dst);
1573
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
1574
    tcg_temp_free_i64(r_val);
1575
    tcg_temp_free(r_size);
1576
    tcg_temp_free(r_asi);
1577
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1578
}
1579

    
1580
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1581
{
1582
    TCGv_i32 r_asi, r_size, r_sign;
1583

    
1584
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1585
    r_size = tcg_const_i32(8);
1586
    r_sign = tcg_const_i32(0);
1587
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1588
    tcg_temp_free(r_sign);
1589
    tcg_temp_free(r_size);
1590
    tcg_temp_free(r_asi);
1591
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1592
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
1593
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1594
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1595
    gen_movl_TN_reg(rd, hi);
1596
}
1597

    
1598
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1599
{
1600
    TCGv_i32 r_asi, r_size;
1601

    
1602
    gen_movl_reg_TN(rd + 1, cpu_tmp0);
1603
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1604
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1605
    r_size = tcg_const_i32(8);
1606
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1607
    tcg_temp_free(r_size);
1608
    tcg_temp_free(r_asi);
1609
}
1610
#endif
1611

    
1612
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1613
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1614
{
1615
    TCGv_i64 r_val;
1616
    TCGv_i32 r_asi, r_size;
1617

    
1618
    gen_ld_asi(dst, addr, insn, 1, 0);
1619

    
1620
    r_val = tcg_const_i64(0xffULL);
1621
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1622
    r_size = tcg_const_i32(1);
1623
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
1624
    tcg_temp_free_i32(r_size);
1625
    tcg_temp_free_i32(r_asi);
1626
    tcg_temp_free_i64(r_val);
1627
}
1628
#endif
1629

    
1630
static inline TCGv get_src1(unsigned int insn, TCGv def)
1631
{
1632
    TCGv r_rs1 = def;
1633
    unsigned int rs1;
1634

    
1635
    rs1 = GET_FIELD(insn, 13, 17);
1636
    if (rs1 == 0) {
1637
        tcg_gen_movi_tl(def, 0);
1638
    } else if (rs1 < 8) {
1639
        r_rs1 = cpu_gregs[rs1];
1640
    } else {
1641
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1642
    }
1643
    return r_rs1;
1644
}
1645

    
1646
static inline TCGv get_src2(unsigned int insn, TCGv def)
1647
{
1648
    TCGv r_rs2 = def;
1649

    
1650
    if (IS_IMM) { /* immediate */
1651
        target_long simm = GET_FIELDs(insn, 19, 31);
1652
        tcg_gen_movi_tl(def, simm);
1653
    } else { /* register */
1654
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
1655
        if (rs2 == 0) {
1656
            tcg_gen_movi_tl(def, 0);
1657
        } else if (rs2 < 8) {
1658
            r_rs2 = cpu_gregs[rs2];
1659
        } else {
1660
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1661
        }
1662
    }
1663
    return r_rs2;
1664
}
1665

    
1666
#ifdef TARGET_SPARC64
1667
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
1668
{
1669
    TCGv_i32 r_tl = tcg_temp_new_i32();
1670

    
1671
    /* load env->tl into r_tl */
1672
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
1673

    
1674
    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
1675
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
1676

    
1677
    /* calculate offset to current trap state from env->ts, reuse r_tl */
1678
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
1679
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));
1680

    
1681
    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
1682
    {
1683
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
1684
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
1685
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
1686
        tcg_temp_free_ptr(r_tl_tmp);
1687
    }
1688

    
1689
    tcg_temp_free_i32(r_tl);
1690
}
1691
#endif
1692

    
1693
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
1694
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
1695
        goto illegal_insn;
1696
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
1697
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
1698
        goto nfpu_insn;
1699

    
1700
/* before an instruction, dc->pc must be static */
1701
static void disas_sparc_insn(DisasContext * dc)
1702
{
1703
    unsigned int insn, opc, rs1, rs2, rd;
1704
    TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
1705
    target_long simm;
1706

    
1707
    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1708
        tcg_gen_debug_insn_start(dc->pc);
1709
    insn = ldl_code(dc->pc);
1710
    opc = GET_FIELD(insn, 0, 1);
1711

    
1712
    rd = GET_FIELD(insn, 2, 6);
1713

    
1714
    cpu_tmp1 = cpu_src1 = tcg_temp_new();
1715
    cpu_tmp2 = cpu_src2 = tcg_temp_new();
1716

    
1717
    switch (opc) {
1718
    case 0:                     /* branches/sethi */
1719
        {
1720
            unsigned int xop = GET_FIELD(insn, 7, 9);
1721
            int32_t target;
1722
            switch (xop) {
1723
#ifdef TARGET_SPARC64
1724
            case 0x1:           /* V9 BPcc */
1725
                {
1726
                    int cc;
1727

    
1728
                    target = GET_FIELD_SP(insn, 0, 18);
1729
                    target = sign_extend(target, 18);
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0, cpu_cond);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1, cpu_cond);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(insn, cpu_src1);
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc, cpu_cond);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x4:           /* SETHI */
                if (rd) { // nop
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    TCGv r_const;

                    r_const = tcg_const_tl(value << 10);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                }
                break;
            case 0x0:           /* UNIMPL */
            default:
                goto illegal_insn;
            }
            break;
        }
        break;
    case 1:                     /*CALL*/
        {
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
            TCGv r_const;

            r_const = tcg_const_tl(dc->pc);
            gen_movl_TN_reg(15, r_const);
            tcg_temp_free(r_const);
            target += dc->pc;
            gen_mov_pc_npc(dc, cpu_cond);
            dc->npc = target;
        }
        goto jmp_insn;
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
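            /* Ticc: conditional software trap.  The trap number is rs1 plus
               rs2 or the immediate field, masked to the valid range and
               offset by TT_TRAP before the exception is raised. */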
            if (xop == 0x3a) {  /* generate trap */
                int cond;

                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {
                    rs2 = GET_FIELD(insn, 25, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
                } else {
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                cond = GET_FIELD(insn, 3, 6);
                if (cond == 0x8) {
                    save_state(dc, cpu_cond);
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);
                } else if (cond != 0) {
                    TCGv r_cond = tcg_temp_new();
                    int l1;
#ifdef TARGET_SPARC64
                    /* V9 icc/xcc */
                    int cc = GET_FIELD_SP(insn, 11, 12);

                    save_state(dc, cpu_cond);
                    if (cc == 0)
                        gen_cond(r_cond, 0, cond, dc);
                    else if (cc == 2)
                        gen_cond(r_cond, 1, cond, dc);
                    else
                        goto illegal_insn;
#else
                    save_state(dc, cpu_cond);
                    gen_cond(r_cond, 0, cond, dc);
#endif
                    l1 = gen_new_label();
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);

                    gen_set_label(l1);
                    tcg_temp_free(r_cond);
                }
                gen_op_next_insn();
                tcg_gen_exit_tb(0);
                dc->is_br = 1;
                goto jmp_insn;
            } else if (xop == 0x28) {
                rs1 = GET_FIELD(insn, 13, 17);
                switch(rs1) {
                case 0: /* rdy */
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC
                                       II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
#endif
                    gen_movl_TN_reg(rd, cpu_y);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    gen_helper_compute_psr();
                    gen_helper_rdccr(cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3: /* V9 rdasi */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x4: /* V9 rdtick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x5: /* V9 rdpc */
                    {
                        TCGv r_const;

                        r_const = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    gen_movl_TN_reg(rd, cpu_gsr);
                    break;
                case 0x16: /* Softint */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x17: /* Tick compare */
                    gen_movl_TN_reg(rd, cpu_tick_cmpr);
                    break;
                case 0x18: /* System tick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, stick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x19: /* System tick compare */
                    gen_movl_TN_reg(rd, cpu_stick_cmpr);
                    break;
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
#endif
                default:
                    goto illegal_insn;
                }
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
                gen_helper_compute_psr();
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_rdpsr(cpu_dst);
#else
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                    goto priv_insn;
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // hpstate
                    // gen_op_rdhpstate();
                    break;
                case 1: // htstate
                    // gen_op_rdhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
                    break;
                case 5: // htba
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
                    break;
                case 6: // hver
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
                    break;
                case 31: // hstick_cmpr
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
                    break;
                default:
                    goto illegal_insn;
                }
#endif
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
                if (!supervisor(dc))
                    goto priv_insn;
#ifdef TARGET_SPARC64
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // tpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 1: // tnpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tnpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 2: // tstate
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tstate));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 3: // tt
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
                                       offsetof(trap_state, tt));
                        tcg_temp_free_ptr(r_tsptr);
                        tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    }
                    break;
                case 4: // tick
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
                        gen_movl_TN_reg(rd, cpu_tmp0);
                        tcg_temp_free_ptr(r_tickptr);
                    }
                    break;
                case 5: // tba
                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
                    break;
                case 6: // pstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, pstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 7: // tl
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, tl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 8: // pil
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, psrpil));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 9: // cwp
                    gen_helper_rdcwp(cpu_tmp0);
                    break;
                case 10: // cansave
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cansave));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 11: // canrestore
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, canrestore));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 12: // cleanwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cleanwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 13: // otherwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, otherwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 14: // wstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, wstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 16: // UA2005 gl
                    CHECK_IU_FEATURE(dc, GL);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, gl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 26: // UA2005 strand status
                    CHECK_IU_FEATURE(dc, HYPV);
                    if (!hypervisor(dc))
                        goto priv_insn;
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
                    break;
                case 31: // ver
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
                    break;
                case 15: // fq
                default:
                    goto illegal_insn;
                }
#else
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
#endif
                gen_movl_TN_reg(rd, cpu_tmp0);
                break;
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
                save_state(dc, cpu_cond);
                gen_helper_flushw();
#else
                if (!supervisor(dc))
                    goto priv_insn;
                gen_movl_TN_reg(rd, cpu_tbr);
#endif
                break;
#endif
            } else if (xop == 0x34) {   /* FPU Operations */
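                /* FPop1: single-precision operands are the cpu_fpr[] TCG
                   globals; double and quad operands are staged through the
                   DT0/DT1 and QT0/QT1 temporaries in the CPU state before
                   calling the helpers. */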
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x1: /* fmovs */
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x5: /* fnegs */
                    gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x9: /* fabss */
                    gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x29: /* fsqrts */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_clear_float_exceptions();
                    gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x2a: /* fsqrtd */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x2b: /* fsqrtq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x41: /* fadds */
                    gen_clear_float_exceptions();
                    gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x42: /* faddd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_faddd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x43: /* faddq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_faddq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x45: /* fsubs */
                    gen_clear_float_exceptions();
                    gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x46: /* fsubd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsubd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x47: /* fsubq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsubq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x49: /* fmuls */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_clear_float_exceptions();
                    gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x4a: /* fmuld */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fmuld();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x4b: /* fmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fmulq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x4d: /* fdivs */
                    gen_clear_float_exceptions();
                    gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x4e: /* fdivd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdivd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x4f: /* fdivq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdivq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x69: /* fsmuld */
                    CHECK_FPU_FEATURE(dc, FSMULD);
                    gen_clear_float_exceptions();
                    gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x6e: /* fdmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdmulq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xc4: /* fitos */
                    gen_clear_float_exceptions();
                    gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc6: /* fdtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc7: /* fqtos */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc8: /* fitod */
                    gen_helper_fitod(cpu_fpr[rs2]);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xc9: /* fstod */
                    gen_helper_fstod(cpu_fpr[rs2]);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xcb: /* fqtod */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtod();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xcc: /* fitoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_helper_fitoq(cpu_fpr[rs2]);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xcd: /* fstoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_helper_fstoq(cpu_fpr[rs2]);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xce: /* fdtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fdtoq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xd1: /* fstoi */
                    gen_clear_float_exceptions();
                    gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xd2: /* fdtoi */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtoi(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xd3: /* fqtoi */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtoi(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 fmovd */
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x3: /* V9 fmovq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
                                    cpu_fpr[QFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
                                    cpu_fpr[QFPREG(rs2) + 2]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
                                    cpu_fpr[QFPREG(rs2) + 3]);
                    break;
                case 0x6: /* V9 fnegd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fnegd();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x7: /* V9 fnegq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_helper_fnegq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xa: /* V9 fabsd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fabsd();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xb: /* V9 fabsq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_helper_fabsq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x81: /* V9 fstox */
                    gen_clear_float_exceptions();
                    gen_helper_fstox(cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x82: /* V9 fdtox */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtox();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x83: /* V9 fqtox */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtox();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x84: /* V9 fxtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x88: /* V9 fxtod */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtod();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x8c: /* V9 fxtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtoq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop == 0x35) {   /* FPU Operations */
#ifdef TARGET_SPARC64
                int cond;
#endif
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                save_state(dc, cpu_cond);
#ifdef TARGET_SPARC64
                if ((xop & 0x11f) == 0x005) { // V9 fmovsr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                    int l1;

                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
                    gen_set_label(l1);
                    break;
                }
#endif
                switch (xop) {
#ifdef TARGET_SPARC64
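/* FMOVcc on %fccN: evaluate the FP condition into a temporary and branch
   over the register copy when the condition is false. */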
#define FMOVSCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVDCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
                                        cpu_fpr[DFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
                                        cpu_fpr[DFPREG(rs2) + 1]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVQCC(fcc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcond(r_cond, fcc, cond);                   \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
                                        cpu_fpr[QFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
                                        cpu_fpr[QFPREG(rs2) + 1]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
                                        cpu_fpr[QFPREG(rs2) + 2]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
                                        cpu_fpr[QFPREG(rs2) + 3]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
                    case 0x001: /* V9 fmovscc %fcc0 */
                        FMOVSCC(0);
                        break;
                    case 0x002: /* V9 fmovdcc %fcc0 */
                        FMOVDCC(0);
                        break;
                    case 0x003: /* V9 fmovqcc %fcc0 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(0);
                        break;
                    case 0x041: /* V9 fmovscc %fcc1 */
                        FMOVSCC(1);
                        break;
                    case 0x042: /* V9 fmovdcc %fcc1 */
                        FMOVDCC(1);
                        break;
                    case 0x043: /* V9 fmovqcc %fcc1 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(1);
                        break;
                    case 0x081: /* V9 fmovscc %fcc2 */
                        FMOVSCC(2);
                        break;
                    case 0x082: /* V9 fmovdcc %fcc2 */
                        FMOVDCC(2);
                        break;
                    case 0x083: /* V9 fmovqcc %fcc2 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(2);
                        break;
                    case 0x0c1: /* V9 fmovscc %fcc3 */
                        FMOVSCC(3);
                        break;
                    case 0x0c2: /* V9 fmovdcc %fcc3 */
                        FMOVDCC(3);
                        break;
                    case 0x0c3: /* V9 fmovqcc %fcc3 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(3);
                        break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
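/* The same pattern for FMOVcc on the integer condition codes (%icc/%xcc). */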
#define FMOVSCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVDCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
                                        cpu_fpr[DFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
                                        cpu_fpr[DFPREG(rs2) + 1]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }
#define FMOVQCC(icc)                                                    \
                    {                                                   \
                        TCGv r_cond;                                    \
                        int l1;                                         \
                                                                        \
                        l1 = gen_new_label();                           \
                        r_cond = tcg_temp_new();                        \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_cond(r_cond, icc, cond, dc);                \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
                                           0, l1);                      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
                                        cpu_fpr[QFPREG(rs2)]);          \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
                                        cpu_fpr[QFPREG(rs2) + 1]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
                                        cpu_fpr[QFPREG(rs2) + 2]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
                                        cpu_fpr[QFPREG(rs2) + 3]);      \
                        gen_set_label(l1);                              \
                        tcg_temp_free(r_cond);                          \
                    }

                    case 0x101: /* V9 fmovscc %icc */
                        FMOVSCC(0);
                        break;
                    case 0x102: /* V9 fmovdcc %icc */
                        FMOVDCC(0);
                        break;
                    case 0x103: /* V9 fmovqcc %icc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(0);
                        break;
                    case 0x181: /* V9 fmovscc %xcc */
                        FMOVSCC(1);
                        break;
                    case 0x182: /* V9 fmovdcc %xcc */
                        FMOVDCC(1);
                        break;
                    case 0x183: /* V9 fmovqcc %xcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVQCC(1);
                        break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#endif
                    case 0x51: /* fcmps, V9 %fcc */
                        gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                        break;
                    case 0x52: /* fcmpd, V9 %fcc */
                        gen_op_load_fpr_DT0(DFPREG(rs1));
                        gen_op_load_fpr_DT1(DFPREG(rs2));
                        gen_op_fcmpd(rd & 3);
                        break;
                    case 0x53: /* fcmpq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpq(rd & 3);
                        break;
                    case 0x55: /* fcmpes, V9 %fcc */
                        gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                        break;
                    case 0x56: /* fcmped, V9 %fcc */
                        gen_op_load_fpr_DT0(DFPREG(rs1));
                        gen_op_load_fpr_DT1(DFPREG(rs2));
                        gen_op_fcmped(rd & 3);
                        break;
                    case 0x57: /* fcmpeq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpeq(rd & 3);
                        break;
                    default:
                        goto illegal_insn;
                }
            } else if (xop == 0x2) {
                // clr/mov shortcut

                rs1 = GET_FIELD(insn, 13, 17);
                if (rs1 == 0) {
                    // or %g0, x, y -> mov T0, x; mov y, T0
                    if (IS_IMM) {       /* immediate */
                        TCGv r_const;

                        simm = GET_FIELDs(insn, 19, 31);
                        r_const = tcg_const_tl(simm);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        gen_movl_reg_TN(rs2, cpu_dst);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                } else {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                        gen_movl_TN_reg(rd, cpu_dst);
                    } else {            /* register */
                        // or x, %g0, y -> mov T1, x; mov y, T1
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 != 0) {
                            gen_movl_reg_TN(rs2, cpu_src2);
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_movl_TN_reg(rd, cpu_dst);
                        } else
                            gen_movl_TN_reg(rd, cpu_src1);
                    }
                }
#ifdef TARGET_SPARC64
            } else if (xop == 0x25) { /* sll, V9 sllx */
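                /* V9 shifts: bit 12 of the instruction selects the 64-bit
                   form with a 6-bit shift count, otherwise the 32-bit form
                   with a 5-bit count. */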
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    }
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x26) { /* srl, V9 srlx */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x27) { /* sra, V9 srax */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
#endif
            } else if (xop < 0x36) {
                if (xop < 0x20) {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    cpu_src2 = get_src2(insn, cpu_src2);
                    switch (xop & ~0x10) {
                    case 0x0: /* add */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_addi_cc(cpu_dst, cpu_src1, simm);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                                dc->cc_op = CC_OP_ADD;
                            } else {
                                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                                dc->cc_op = CC_OP_ADD;
                            } else {
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                            }
                        }
                        break;
                    case 0x1: /* and */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x2: /* or */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x3: /* xor */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x4: /* sub */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
                            } else {
                                tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
                                dc->cc_op = CC_OP_SUB;
                            } else {
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                            }
                        }
                        break;
                    case 0x5: /* andn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x6: /* orn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x7: /* xorn */
2939
                        if (IS_IMM) {
2940
                            simm = GET_FIELDs(insn, 19, 31);
2941
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
2942
                        } else {
2943
                            tcg_gen_not_tl(cpu_tmp0, cpu_src2);
2944
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
2945
                        }
2946
                        if (xop & 0x10) {
2947
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2948
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2949
                            dc->cc_op = CC_OP_LOGIC;
2950
                        }
2951
                        break;
2952
                    case 0x8: /* addx, V9 addc */
2953
                        if (IS_IMM) {
2954
                            simm = GET_FIELDs(insn, 19, 31);
2955
                            if (xop & 0x10) {
2956
                                gen_helper_compute_psr();
2957
                                gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
2958
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
2959
                                dc->cc_op = CC_OP_ADDX;
2960
                            } else {
2961
                                gen_helper_compute_psr();
2962
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
2963
                                tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
2964
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
2965
                            }
2966
                        } else {
2967
                            if (xop & 0x10) {
2968
                                gen_helper_compute_psr();
2969
                                gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
2970
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
2971
                                dc->cc_op = CC_OP_ADDX;
2972
                            } else {
2973
                                gen_helper_compute_psr();
2974
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
2975
                                tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
2976
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
2977
                            }
2978
                        }
2979
                        break;
2980
#ifdef TARGET_SPARC64
2981
                    case 0x9: /* V9 mulx */
2982
                        if (IS_IMM) {
2983
                            simm = GET_FIELDs(insn, 19, 31);
2984
                            tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
2985
                        } else {
2986
                            tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
2987
                        }
2988
                        break;
2989
#endif
2990
                    case 0xa: /* umul */
2991
                        CHECK_IU_FEATURE(dc, MUL);
2992
                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
2993
                        if (xop & 0x10) {
2994
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2995
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2996
                            dc->cc_op = CC_OP_LOGIC;
2997
                        }
2998
                        break;
2999
                    case 0xb: /* smul */
3000
                        CHECK_IU_FEATURE(dc, MUL);
3001
                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3002
                        if (xop & 0x10) {
3003
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3004
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3005
                            dc->cc_op = CC_OP_LOGIC;
3006
                        }
3007
                        break;
3008
                    case 0xc: /* subx, V9 subc */
3009
                        if (IS_IMM) {
3010
                            simm = GET_FIELDs(insn, 19, 31);
3011
                            if (xop & 0x10) {
3012
                                gen_helper_compute_psr();
3013
                                gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
3014
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
3015
                                dc->cc_op = CC_OP_SUBX;
3016
                            } else {
3017
                                gen_helper_compute_psr();
3018
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
3019
                                tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3020
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3021
                            }
3022
                        } else {
3023
                            if (xop & 0x10) {
3024
                                gen_helper_compute_psr();
3025
                                gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3026
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
3027
                                dc->cc_op = CC_OP_SUBX;
3028
                            } else {
3029
                                gen_helper_compute_psr();
3030
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
3031
                                tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3032
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3033
                            }
3034
                        }
3035
                        break;
3036
#ifdef TARGET_SPARC64
3037
                    case 0xd: /* V9 udivx */
3038
                        tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3039
                        tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3040
                        gen_trap_ifdivzero_tl(cpu_cc_src2);
3041
                        tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3042
                        break;
3043
#endif
3044
                    case 0xe: /* udiv */
3045
                        CHECK_IU_FEATURE(dc, DIV);
3046
                        gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3047
                        if (xop & 0x10) {
3048
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3049
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
3050
                            dc->cc_op = CC_OP_DIV;
3051
                        }
3052
                        break;
3053
                    case 0xf: /* sdiv */
3054
                        CHECK_IU_FEATURE(dc, DIV);
3055
                        gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3056
                        if (xop & 0x10) {
3057
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3058
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
3059
                            dc->cc_op = CC_OP_DIV;
3060
                        }
3061
                        break;
3062
                    default:
3063
                        goto illegal_insn;
3064
                    }
3065
                    gen_movl_TN_reg(rd, cpu_dst);
3066
                } else {
3067
                    cpu_src1 = get_src1(insn, cpu_src1);
3068
                    cpu_src2 = get_src2(insn, cpu_src2);
3069
                    switch (xop) {
3070
                    case 0x20: /* taddcc */
3071
                        gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3072
                        gen_movl_TN_reg(rd, cpu_dst);
3073
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3074
                        dc->cc_op = CC_OP_TADD;
3075
                        break;
3076
                    case 0x21: /* tsubcc */
3077
                        gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3078
                        gen_movl_TN_reg(rd, cpu_dst);
3079
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3080
                        dc->cc_op = CC_OP_TSUB;
3081
                        break;
3082
                    case 0x22: /* taddcctv */
3083
                        save_state(dc, cpu_cond);
3084
                        gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3085
                        gen_movl_TN_reg(rd, cpu_dst);
3086
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3087
                        dc->cc_op = CC_OP_TADDTV;
3088
                        break;
3089
                    case 0x23: /* tsubcctv */
3090
                        save_state(dc, cpu_cond);
3091
                        gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3092
                        gen_movl_TN_reg(rd, cpu_dst);
3093
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3094
                        dc->cc_op = CC_OP_TSUBTV;
3095
                        break;
3096
                    case 0x24: /* mulscc */
3097
                        gen_helper_compute_psr();
3098
                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3099
                        gen_movl_TN_reg(rd, cpu_dst);
3100
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3101
                        dc->cc_op = CC_OP_ADD;
3102
                        break;
3103
#ifndef TARGET_SPARC64
3104
                    case 0x25:  /* sll */
3105
                        if (IS_IMM) { /* immediate */
3106
                            simm = GET_FIELDs(insn, 20, 31);
3107
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3108
                        } else { /* register */
3109
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3110
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3111
                        }
3112
                        gen_movl_TN_reg(rd, cpu_dst);
3113
                        break;
3114
                    case 0x26:  /* srl */
3115
                        if (IS_IMM) { /* immediate */
3116
                            simm = GET_FIELDs(insn, 20, 31);
3117
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3118
                        } else { /* register */
3119
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3120
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3121
                        }
3122
                        gen_movl_TN_reg(rd, cpu_dst);
3123
                        break;
3124
                    case 0x27:  /* sra */
3125
                        if (IS_IMM) { /* immediate */
3126
                            simm = GET_FIELDs(insn, 20, 31);
3127
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3128
                        } else { /* register */
3129
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3130
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3131
                        }
3132
                        gen_movl_TN_reg(rd, cpu_dst);
3133
                        break;
3134
#endif
3135
                    case 0x30:
3136
                        {
3137
                            switch(rd) {
3138
                            case 0: /* wry */
3139
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3140
                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3141
                                break;
3142
#ifndef TARGET_SPARC64
3143
                            case 0x01 ... 0x0f: /* undefined in the
3144
                                                   SPARCv8 manual, nop
3145
                                                   on the microSPARC
3146
                                                   II */
3147
                            case 0x10 ... 0x1f: /* implementation-dependent
3148
                                                   in the SPARCv8
3149
                                                   manual, nop on the
3150
                                                   microSPARC II */
3151
                                break;
3152
#else
3153
                            case 0x2: /* V9 wrccr */
3154
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3155
                                gen_helper_wrccr(cpu_dst);
3156
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3157
                                dc->cc_op = CC_OP_FLAGS;
3158
                                break;
3159
                            case 0x3: /* V9 wrasi */
3160
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3161
                                tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3162
                                tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3163
                                break;
3164
                            case 0x6: /* V9 wrfprs */
3165
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3166
                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3167
                                save_state(dc, cpu_cond);
3168
                                gen_op_next_insn();
3169
                                tcg_gen_exit_tb(0);
3170
                                dc->is_br = 1;
3171
                                break;
3172
                            case 0xf: /* V9 sir, nop if user */
3173
#if !defined(CONFIG_USER_ONLY)
3174
                                if (supervisor(dc)) {
3175
                                    ; // XXX
3176
                                }
3177
#endif
3178
                                break;
3179
                            case 0x13: /* Graphics Status */
3180
                                if (gen_trap_ifnofpu(dc, cpu_cond))
3181
                                    goto jmp_insn;
3182
                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3183
                                break;
3184
                            case 0x14: /* Softint set */
3185
                                if (!supervisor(dc))
3186
                                    goto illegal_insn;
3187
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3188
                                gen_helper_set_softint(cpu_tmp64);
3189
                                break;
3190
                            case 0x15: /* Softint clear */
3191
                                if (!supervisor(dc))
3192
                                    goto illegal_insn;
3193
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3194
                                gen_helper_clear_softint(cpu_tmp64);
3195
                                break;
3196
                            case 0x16: /* Softint write */
3197
                                if (!supervisor(dc))
3198
                                    goto illegal_insn;
3199
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3200
                                gen_helper_write_softint(cpu_tmp64);
3201
                                break;
3202
                            case 0x17: /* Tick compare */
3203
#if !defined(CONFIG_USER_ONLY)
3204
                                if (!supervisor(dc))
3205
                                    goto illegal_insn;
3206
#endif
3207
                                {
3208
                                    TCGv_ptr r_tickptr;
3209

    
3210
                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3211
                                                   cpu_src2);
3212
                                    r_tickptr = tcg_temp_new_ptr();
3213
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
3214
                                                   offsetof(CPUState, tick));
3215
                                    gen_helper_tick_set_limit(r_tickptr,
3216
                                                              cpu_tick_cmpr);
3217
                                    tcg_temp_free_ptr(r_tickptr);
3218
                                }
3219
                                break;
3220
                            case 0x18: /* System tick */
3221
#if !defined(CONFIG_USER_ONLY)
3222
                                if (!supervisor(dc))
3223
                                    goto illegal_insn;
3224
#endif
3225
                                {
3226
                                    TCGv_ptr r_tickptr;
3227

    
3228
                                    tcg_gen_xor_tl(cpu_dst, cpu_src1,
3229
                                                   cpu_src2);
3230
                                    r_tickptr = tcg_temp_new_ptr();
3231
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
3232
                                                   offsetof(CPUState, stick));
3233
                                    gen_helper_tick_set_count(r_tickptr,
3234
                                                              cpu_dst);
3235
                                    tcg_temp_free_ptr(r_tickptr);
3236
                                }
3237
                                break;
3238
                            case 0x19: /* System tick compare */
3239
#if !defined(CONFIG_USER_ONLY)
3240
                                if (!supervisor(dc))
3241
                                    goto illegal_insn;
3242
#endif
3243
                                {
3244
                                    TCGv_ptr r_tickptr;
3245

    
3246
                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3247
                                                   cpu_src2);
3248
                                    r_tickptr = tcg_temp_new_ptr();
3249
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
3250
                                                   offsetof(CPUState, stick));
3251
                                    gen_helper_tick_set_limit(r_tickptr,
3252
                                                              cpu_stick_cmpr);
3253
                                    tcg_temp_free_ptr(r_tickptr);
3254
                                }
3255
                                break;
3256

    
3257
                            case 0x10: /* Performance Control */
3258
                            case 0x11: /* Performance Instrumentation
3259
                                          Counter */
3260
                            case 0x12: /* Dispatch Control */
3261
#endif
3262
                            default:
3263
                                goto illegal_insn;
3264
                            }
3265
                        }
3266
                        break;
3267
#if !defined(CONFIG_USER_ONLY)
3268
                    case 0x31: /* wrpsr, V9 saved, restored */
3269
                        {
3270
                            if (!supervisor(dc))
3271
                                goto priv_insn;
3272
#ifdef TARGET_SPARC64
3273
                            switch (rd) {
3274
                            case 0:
3275
                                gen_helper_saved();
3276
                                break;
3277
                            case 1:
3278
                                gen_helper_restored();
3279
                                break;
3280
                            case 2: /* UA2005 allclean */
3281
                            case 3: /* UA2005 otherw */
3282
                            case 4: /* UA2005 normalw */
3283
                            case 5: /* UA2005 invalw */
3284
                                // XXX
3285
                            default:
3286
                                goto illegal_insn;
3287
                            }
3288
#else
3289
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3290
                            gen_helper_wrpsr(cpu_dst);
3291
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3292
                            dc->cc_op = CC_OP_FLAGS;
3293
                            save_state(dc, cpu_cond);
3294
                            gen_op_next_insn();
3295
                            tcg_gen_exit_tb(0);
3296
                            dc->is_br = 1;
3297
#endif
3298
                        }
3299
                        break;
3300
                    case 0x32: /* wrwim, V9 wrpr */
3301
                        {
3302
                            if (!supervisor(dc))
3303
                                goto priv_insn;
3304
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3305
#ifdef TARGET_SPARC64
3306
                            switch (rd) {
3307
                            case 0: // tpc
3308
                                {
3309
                                    TCGv_ptr r_tsptr;
3310

    
3311
                                    r_tsptr = tcg_temp_new_ptr();
3312
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3313
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3314
                                                  offsetof(trap_state, tpc));
3315
                                    tcg_temp_free_ptr(r_tsptr);
3316
                                }
3317
                                break;
3318
                            case 1: // tnpc
3319
                                {
3320
                                    TCGv_ptr r_tsptr;
3321

    
3322
                                    r_tsptr = tcg_temp_new_ptr();
3323
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3324
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3325
                                                  offsetof(trap_state, tnpc));
3326
                                    tcg_temp_free_ptr(r_tsptr);
3327
                                }
3328
                                break;
3329
                            case 2: // tstate
3330
                                {
3331
                                    TCGv_ptr r_tsptr;
3332

    
3333
                                    r_tsptr = tcg_temp_new_ptr();
3334
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3335
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3336
                                                  offsetof(trap_state,
3337
                                                           tstate));
3338
                                    tcg_temp_free_ptr(r_tsptr);
3339
                                }
3340
                                break;
3341
                            case 3: // tt
3342
                                {
3343
                                    TCGv_ptr r_tsptr;
3344

    
3345
                                    r_tsptr = tcg_temp_new_ptr();
3346
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3347
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3348
                                    tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3349
                                                   offsetof(trap_state, tt));
3350
                                    tcg_temp_free_ptr(r_tsptr);
3351
                                }
3352
                                break;
3353
                            case 4: // tick
3354
                                {
3355
                                    TCGv_ptr r_tickptr;
3356

    
3357
                                    r_tickptr = tcg_temp_new_ptr();
3358
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
3359
                                                   offsetof(CPUState, tick));
3360
                                    gen_helper_tick_set_count(r_tickptr,
3361
                                                              cpu_tmp0);
3362
                                    tcg_temp_free_ptr(r_tickptr);
3363
                                }
3364
                                break;
3365
                            case 5: // tba
3366
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3367
                                break;
3368
                            case 6: // pstate
3369
                                save_state(dc, cpu_cond);
3370
                                gen_helper_wrpstate(cpu_tmp0);
3371
                                gen_op_next_insn();
3372
                                tcg_gen_exit_tb(0);
3373
                                dc->is_br = 1;
3374
                                break;
3375
                            case 7: // tl
3376
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3377
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3378
                                               offsetof(CPUSPARCState, tl));
3379
                                break;
3380
                            case 8: // pil
3381
                                gen_helper_wrpil(cpu_tmp0);
3382
                                break;
3383
                            case 9: // cwp
3384
                                gen_helper_wrcwp(cpu_tmp0);
3385
                                break;
3386
                            case 10: // cansave
3387
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3388
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3389
                                               offsetof(CPUSPARCState,
3390
                                                        cansave));
3391
                                break;
3392
                            case 11: // canrestore
3393
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3394
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3395
                                               offsetof(CPUSPARCState,
3396
                                                        canrestore));
3397
                                break;
3398
                            case 12: // cleanwin
3399
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3400
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3401
                                               offsetof(CPUSPARCState,
3402
                                                        cleanwin));
3403
                                break;
3404
                            case 13: // otherwin
3405
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3406
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3407
                                               offsetof(CPUSPARCState,
3408
                                                        otherwin));
3409
                                break;
3410
                            case 14: // wstate
3411
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3412
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3413
                                               offsetof(CPUSPARCState,
3414
                                                        wstate));
3415
                                break;
3416
                            case 16: // UA2005 gl
3417
                                CHECK_IU_FEATURE(dc, GL);
3418
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3419
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
3420
                                               offsetof(CPUSPARCState, gl));
3421
                                break;
3422
                            case 26: // UA2005 strand status
3423
                                CHECK_IU_FEATURE(dc, HYPV);
3424
                                if (!hypervisor(dc))
3425
                                    goto priv_insn;
3426
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3427
                                break;
3428
                            default:
3429
                                goto illegal_insn;
3430
                            }
3431
#else
3432
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3433
                            if (dc->def->nwindows != 32)
3434
                                tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3435
                                                (1 << dc->def->nwindows) - 1);
3436
                            tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3437
#endif
3438
                        }
3439
                        break;
3440
                    case 0x33: /* wrtbr, UA2005 wrhpr */
3441
                        {
3442
#ifndef TARGET_SPARC64
3443
                            if (!supervisor(dc))
3444
                                goto priv_insn;
3445
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3446
#else
3447
                            CHECK_IU_FEATURE(dc, HYPV);
3448
                            if (!hypervisor(dc))
3449
                                goto priv_insn;
3450
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3451
                            switch (rd) {
3452
                            case 0: // hpstate
3453
                                // XXX gen_op_wrhpstate();
3454
                                save_state(dc, cpu_cond);
3455
                                gen_op_next_insn();
3456
                                tcg_gen_exit_tb(0);
3457
                                dc->is_br = 1;
3458
                                break;
3459
                            case 1: // htstate
3460
                                // XXX gen_op_wrhtstate();
3461
                                break;
3462
                            case 3: // hintp
3463
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3464
                                break;
3465
                            case 5: // htba
3466
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3467
                                break;
3468
                            case 31: // hstick_cmpr
3469
                                {
3470
                                    TCGv_ptr r_tickptr;
3471

    
3472
                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3473
                                    r_tickptr = tcg_temp_new_ptr();
3474
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
3475
                                                   offsetof(CPUState, hstick));
3476
                                    gen_helper_tick_set_limit(r_tickptr,
3477
                                                              cpu_hstick_cmpr);
3478
                                    tcg_temp_free_ptr(r_tickptr);
3479
                                }
3480
                                break;
3481
                            case 6: // hver readonly
3482
                            default:
3483
                                goto illegal_insn;
3484
                            }
3485
#endif
3486
                        }
3487
                        break;
3488
#endif
3489
#ifdef TARGET_SPARC64
3490
                    case 0x2c: /* V9 movcc */
3491
                        {
3492
                            int cc = GET_FIELD_SP(insn, 11, 12);
3493
                            int cond = GET_FIELD_SP(insn, 14, 17);
3494
                            TCGv r_cond;
3495
                            int l1;
3496

    
3497
                            r_cond = tcg_temp_new();
3498
                            if (insn & (1 << 18)) {
3499
                                if (cc == 0)
3500
                                    gen_cond(r_cond, 0, cond, dc);
3501
                                else if (cc == 2)
3502
                                    gen_cond(r_cond, 1, cond, dc);
3503
                                else
3504
                                    goto illegal_insn;
3505
                            } else {
3506
                                gen_fcond(r_cond, cc, cond);
3507
                            }
3508

    
3509
                            l1 = gen_new_label();
3510

    
3511
                            tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3512
                            if (IS_IMM) {       /* immediate */
3513
                                TCGv r_const;
3514

    
3515
                                simm = GET_FIELD_SPs(insn, 0, 10);
3516
                                r_const = tcg_const_tl(simm);
3517
                                gen_movl_TN_reg(rd, r_const);
3518
                                tcg_temp_free(r_const);
3519
                            } else {
3520
                                rs2 = GET_FIELD_SP(insn, 0, 4);
3521
                                gen_movl_reg_TN(rs2, cpu_tmp0);
3522
                                gen_movl_TN_reg(rd, cpu_tmp0);
3523
                            }
3524
                            gen_set_label(l1);
3525
                            tcg_temp_free(r_cond);
3526
                            break;
3527
                        }
3528
                    case 0x2d: /* V9 sdivx */
3529
                        gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3530
                        gen_movl_TN_reg(rd, cpu_dst);
3531
                        break;
3532
                    case 0x2e: /* V9 popc */
3533
                        {
3534
                            cpu_src2 = get_src2(insn, cpu_src2);
3535
                            gen_helper_popc(cpu_dst, cpu_src2);
3536
                            gen_movl_TN_reg(rd, cpu_dst);
3537
                        }
3538
                    case 0x2f: /* V9 movr */
3539
                        {
3540
                            int cond = GET_FIELD_SP(insn, 10, 12);
3541
                            int l1;
3542

    
3543
                            cpu_src1 = get_src1(insn, cpu_src1);
3544

    
3545
                            l1 = gen_new_label();
3546

    
3547
                            tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3548
                                              cpu_src1, 0, l1);
3549
                            if (IS_IMM) {       /* immediate */
3550
                                TCGv r_const;
3551

    
3552
                                simm = GET_FIELD_SPs(insn, 0, 9);
3553
                                r_const = tcg_const_tl(simm);
3554
                                gen_movl_TN_reg(rd, r_const);
3555
                                tcg_temp_free(r_const);
3556
                            } else {
3557
                                rs2 = GET_FIELD_SP(insn, 0, 4);
3558
                                gen_movl_reg_TN(rs2, cpu_tmp0);
3559
                                gen_movl_TN_reg(rd, cpu_tmp0);
3560
                            }
3561
                            gen_set_label(l1);
3562
                            break;
3563
                        }
3564
#endif
3565
                    default:
3566
                        goto illegal_insn;
3567
                    }
3568
                }
3569
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3570
#ifdef TARGET_SPARC64
3571
                int opf = GET_FIELD_SP(insn, 5, 13);
3572
                rs1 = GET_FIELD(insn, 13, 17);
3573
                rs2 = GET_FIELD(insn, 27, 31);
3574
                if (gen_trap_ifnofpu(dc, cpu_cond))
3575
                    goto jmp_insn;
3576

    
3577
                switch (opf) {
3578
                case 0x000: /* VIS I edge8cc */
3579
                case 0x001: /* VIS II edge8n */
3580
                case 0x002: /* VIS I edge8lcc */
3581
                case 0x003: /* VIS II edge8ln */
3582
                case 0x004: /* VIS I edge16cc */
3583
                case 0x005: /* VIS II edge16n */
3584
                case 0x006: /* VIS I edge16lcc */
3585
                case 0x007: /* VIS II edge16ln */
3586
                case 0x008: /* VIS I edge32cc */
3587
                case 0x009: /* VIS II edge32n */
3588
                case 0x00a: /* VIS I edge32lcc */
3589
                case 0x00b: /* VIS II edge32ln */
3590
                    // XXX
3591
                    goto illegal_insn;
3592
                case 0x010: /* VIS I array8 */
3593
                    CHECK_FPU_FEATURE(dc, VIS1);
3594
                    cpu_src1 = get_src1(insn, cpu_src1);
3595
                    gen_movl_reg_TN(rs2, cpu_src2);
3596
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3597
                    gen_movl_TN_reg(rd, cpu_dst);
3598
                    break;
3599
                case 0x012: /* VIS I array16 */
3600
                    CHECK_FPU_FEATURE(dc, VIS1);
3601
                    cpu_src1 = get_src1(insn, cpu_src1);
3602
                    gen_movl_reg_TN(rs2, cpu_src2);
3603
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3604
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3605
                    gen_movl_TN_reg(rd, cpu_dst);
3606
                    break;
3607
                case 0x014: /* VIS I array32 */
3608
                    CHECK_FPU_FEATURE(dc, VIS1);
3609
                    cpu_src1 = get_src1(insn, cpu_src1);
3610
                    gen_movl_reg_TN(rs2, cpu_src2);
3611
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3612
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3613
                    gen_movl_TN_reg(rd, cpu_dst);
3614
                    break;
3615
                case 0x018: /* VIS I alignaddr */
3616
                    CHECK_FPU_FEATURE(dc, VIS1);
3617
                    cpu_src1 = get_src1(insn, cpu_src1);
3618
                    gen_movl_reg_TN(rs2, cpu_src2);
3619
                    gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3620
                    gen_movl_TN_reg(rd, cpu_dst);
3621
                    break;
3622
                case 0x019: /* VIS II bmask */
3623
                case 0x01a: /* VIS I alignaddrl */
3624
                    // XXX
3625
                    goto illegal_insn;
3626
                case 0x020: /* VIS I fcmple16 */
3627
                    CHECK_FPU_FEATURE(dc, VIS1);
3628
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3629
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3630
                    gen_helper_fcmple16();
3631
                    gen_op_store_DT0_fpr(DFPREG(rd));
3632
                    break;
3633
                case 0x022: /* VIS I fcmpne16 */
3634
                    CHECK_FPU_FEATURE(dc, VIS1);
3635
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3636
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3637
                    gen_helper_fcmpne16();
3638
                    gen_op_store_DT0_fpr(DFPREG(rd));
3639
                    break;
3640
                case 0x024: /* VIS I fcmple32 */
3641
                    CHECK_FPU_FEATURE(dc, VIS1);
3642
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3643
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3644
                    gen_helper_fcmple32();
3645
                    gen_op_store_DT0_fpr(DFPREG(rd));
3646
                    break;
3647
                case 0x026: /* VIS I fcmpne32 */
3648
                    CHECK_FPU_FEATURE(dc, VIS1);
3649
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3650
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3651
                    gen_helper_fcmpne32();
3652
                    gen_op_store_DT0_fpr(DFPREG(rd));
3653
                    break;
3654
                case 0x028: /* VIS I fcmpgt16 */
3655
                    CHECK_FPU_FEATURE(dc, VIS1);
3656
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3657
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3658
                    gen_helper_fcmpgt16();
3659
                    gen_op_store_DT0_fpr(DFPREG(rd));
3660
                    break;
3661
                case 0x02a: /* VIS I fcmpeq16 */
3662
                    CHECK_FPU_FEATURE(dc, VIS1);
3663
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3664
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3665
                    gen_helper_fcmpeq16();
3666
                    gen_op_store_DT0_fpr(DFPREG(rd));
3667
                    break;
3668
                case 0x02c: /* VIS I fcmpgt32 */
3669
                    CHECK_FPU_FEATURE(dc, VIS1);
3670
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3671
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3672
                    gen_helper_fcmpgt32();
3673
                    gen_op_store_DT0_fpr(DFPREG(rd));
3674
                    break;
3675
                case 0x02e: /* VIS I fcmpeq32 */
3676
                    CHECK_FPU_FEATURE(dc, VIS1);
3677
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3678
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3679
                    gen_helper_fcmpeq32();
3680
                    gen_op_store_DT0_fpr(DFPREG(rd));
3681
                    break;
3682
                case 0x031: /* VIS I fmul8x16 */
3683
                    CHECK_FPU_FEATURE(dc, VIS1);
3684
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3685
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3686
                    gen_helper_fmul8x16();
3687
                    gen_op_store_DT0_fpr(DFPREG(rd));
3688
                    break;
3689
                case 0x033: /* VIS I fmul8x16au */
3690
                    CHECK_FPU_FEATURE(dc, VIS1);
3691
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3692
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3693
                    gen_helper_fmul8x16au();
3694
                    gen_op_store_DT0_fpr(DFPREG(rd));
3695
                    break;
3696
                case 0x035: /* VIS I fmul8x16al */
3697
                    CHECK_FPU_FEATURE(dc, VIS1);
3698
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3699
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3700
                    gen_helper_fmul8x16al();
3701
                    gen_op_store_DT0_fpr(DFPREG(rd));
3702
                    break;
3703
                case 0x036: /* VIS I fmul8sux16 */
3704
                    CHECK_FPU_FEATURE(dc, VIS1);
3705
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3706
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3707
                    gen_helper_fmul8sux16();
3708
                    gen_op_store_DT0_fpr(DFPREG(rd));
3709
                    break;
3710
                case 0x037: /* VIS I fmul8ulx16 */
3711
                    CHECK_FPU_FEATURE(dc, VIS1);
3712
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3713
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3714
                    gen_helper_fmul8ulx16();
3715
                    gen_op_store_DT0_fpr(DFPREG(rd));
3716
                    break;
3717
                case 0x038: /* VIS I fmuld8sux16 */
3718
                    CHECK_FPU_FEATURE(dc, VIS1);
3719
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3720
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3721
                    gen_helper_fmuld8sux16();
3722
                    gen_op_store_DT0_fpr(DFPREG(rd));
3723
                    break;
3724
                case 0x039: /* VIS I fmuld8ulx16 */
3725
                    CHECK_FPU_FEATURE(dc, VIS1);
3726
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3727
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3728
                    gen_helper_fmuld8ulx16();
3729
                    gen_op_store_DT0_fpr(DFPREG(rd));
3730
                    break;
3731
                case 0x03a: /* VIS I fpack32 */
3732
                case 0x03b: /* VIS I fpack16 */
3733
                case 0x03d: /* VIS I fpackfix */
3734
                case 0x03e: /* VIS I pdist */
3735
                    // XXX
3736
                    goto illegal_insn;
3737
                case 0x048: /* VIS I faligndata */
3738
                    CHECK_FPU_FEATURE(dc, VIS1);
3739
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3740
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3741
                    gen_helper_faligndata();
3742
                    gen_op_store_DT0_fpr(DFPREG(rd));
3743
                    break;
3744
                case 0x04b: /* VIS I fpmerge */
3745
                    CHECK_FPU_FEATURE(dc, VIS1);
3746
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3747
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3748
                    gen_helper_fpmerge();
3749
                    gen_op_store_DT0_fpr(DFPREG(rd));
3750
                    break;
3751
                case 0x04c: /* VIS II bshuffle */
3752
                    // XXX
3753
                    goto illegal_insn;
3754
                case 0x04d: /* VIS I fexpand */
3755
                    CHECK_FPU_FEATURE(dc, VIS1);
3756
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3757
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3758
                    gen_helper_fexpand();
3759
                    gen_op_store_DT0_fpr(DFPREG(rd));
3760
                    break;
3761
                case 0x050: /* VIS I fpadd16 */
3762
                    CHECK_FPU_FEATURE(dc, VIS1);
3763
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3764
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3765
                    gen_helper_fpadd16();
3766
                    gen_op_store_DT0_fpr(DFPREG(rd));
3767
                    break;
3768
                case 0x051: /* VIS I fpadd16s */
3769
                    CHECK_FPU_FEATURE(dc, VIS1);
3770
                    gen_helper_fpadd16s(cpu_fpr[rd],
3771
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
3772
                    break;
3773
                case 0x052: /* VIS I fpadd32 */
3774
                    CHECK_FPU_FEATURE(dc, VIS1);
3775
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3776
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3777
                    gen_helper_fpadd32();
3778
                    gen_op_store_DT0_fpr(DFPREG(rd));
3779
                    break;
3780
                case 0x053: /* VIS I fpadd32s */
3781
                    CHECK_FPU_FEATURE(dc, VIS1);
3782
                    gen_helper_fpadd32s(cpu_fpr[rd],
3783
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
3784
                    break;
3785
                case 0x054: /* VIS I fpsub16 */
3786
                    CHECK_FPU_FEATURE(dc, VIS1);
3787
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3788
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3789
                    gen_helper_fpsub16();
3790
                    gen_op_store_DT0_fpr(DFPREG(rd));
3791
                    break;
3792
                case 0x055: /* VIS I fpsub16s */
3793
                    CHECK_FPU_FEATURE(dc, VIS1);
3794
                    gen_helper_fpsub16s(cpu_fpr[rd],
3795
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
3796
                    break;
3797
                case 0x056: /* VIS I fpsub32 */
3798
                    CHECK_FPU_FEATURE(dc, VIS1);
3799
                    gen_op_load_fpr_DT0(DFPREG(rs1));
3800
                    gen_op_load_fpr_DT1(DFPREG(rs2));
3801
                    gen_helper_fpsub32();
3802
                    gen_op_store_DT0_fpr(DFPREG(rd));
3803
                    break;
3804
                case 0x057: /* VIS I fpsub32s */
3805
                    CHECK_FPU_FEATURE(dc, VIS1);
3806
                    gen_helper_fpsub32s(cpu_fpr[rd],
3807
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
3808
                    break;
3809
                case 0x060: /* VIS I fzero */
3810
                    CHECK_FPU_FEATURE(dc, VIS1);
3811
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3812
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3813
                    break;
3814
                case 0x061: /* VIS I fzeros */
3815
                    CHECK_FPU_FEATURE(dc, VIS1);
3816
                    tcg_gen_movi_i32(cpu_fpr[rd], 0);
3817
                    break;
3818
                case 0x062: /* VIS I fnor */
3819
                    CHECK_FPU_FEATURE(dc, VIS1);
3820
                    tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3821
                                    cpu_fpr[DFPREG(rs2)]);
3822
                    tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3823
                                    cpu_fpr[DFPREG(rs2) + 1]);
3824
                    break;
3825
                case 0x063: /* VIS I fnors */
3826
                    CHECK_FPU_FEATURE(dc, VIS1);
3827
                    tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3828
                    break;
3829
                case 0x064: /* VIS I fandnot2 */
3830
                    CHECK_FPU_FEATURE(dc, VIS1);
3831
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3832
                                     cpu_fpr[DFPREG(rs2)]);
3833
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3834
                                     cpu_fpr[DFPREG(rs1) + 1],
3835
                                     cpu_fpr[DFPREG(rs2) + 1]);
3836
                    break;
3837
                case 0x065: /* VIS I fandnot2s */
3838
                    CHECK_FPU_FEATURE(dc, VIS1);
3839
                    tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3840
                    break;
3841
                case 0x066: /* VIS I fnot2 */
3842
                    CHECK_FPU_FEATURE(dc, VIS1);
3843
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
3844
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3845
                                    cpu_fpr[DFPREG(rs2) + 1]);
3846
                    break;
3847
                case 0x067: /* VIS I fnot2s */
3848
                    CHECK_FPU_FEATURE(dc, VIS1);
3849
                    tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
3850
                    break;
3851
                case 0x068: /* VIS I fandnot1 */
3852
                    CHECK_FPU_FEATURE(dc, VIS1);
3853
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3854
                                     cpu_fpr[DFPREG(rs1)]);
3855
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3856
                                     cpu_fpr[DFPREG(rs2) + 1],
3857
                                     cpu_fpr[DFPREG(rs1) + 1]);
3858
                    break;
3859
                case 0x069: /* VIS I fandnot1s */
3860
                    CHECK_FPU_FEATURE(dc, VIS1);
3861
                    tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
3862
                    break;
3863
                case 0x06a: /* VIS I fnot1 */
3864
                    CHECK_FPU_FEATURE(dc, VIS1);
3865
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3866
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3867
                                    cpu_fpr[DFPREG(rs1) + 1]);
3868
                    break;
3869
                case 0x06b: /* VIS I fnot1s */
3870
                    CHECK_FPU_FEATURE(dc, VIS1);
3871
                    tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3872
                    break;
3873
                case 0x06c: /* VIS I fxor */
3874
                    CHECK_FPU_FEATURE(dc, VIS1);
3875
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3876
                                    cpu_fpr[DFPREG(rs2)]);
3877
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
3878
                                    cpu_fpr[DFPREG(rs1) + 1],
3879
                                    cpu_fpr[DFPREG(rs2) + 1]);
3880
                    break;
3881
                case 0x06d: /* VIS I fxors */
3882
                    CHECK_FPU_FEATURE(dc, VIS1);
3883
                    tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3884
                    break;
3885
                case 0x06e: /* VIS I fnand */
3886
                    CHECK_FPU_FEATURE(dc, VIS1);
3887
                    tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3888
                                     cpu_fpr[DFPREG(rs2)]);
3889
                    tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3890
                                     cpu_fpr[DFPREG(rs2) + 1]);
3891
                    break;
3892
                case 0x06f: /* VIS I fnands */
3893
                    CHECK_FPU_FEATURE(dc, VIS1);
3894
                    tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3895
                    break;
3896
                case 0x070: /* VIS I fand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x071: /* VIS I fands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x072: /* VIS I fxnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
                                    cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x073: /* VIS I fxnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
                    tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
                    break;
                case 0x074: /* VIS I fsrc1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x075: /* VIS I fsrc1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                    break;
                case 0x076: /* VIS I fornot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x077: /* VIS I fornot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x078: /* VIS I fsrc2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs2));
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x079: /* VIS I fsrc2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x07a: /* VIS I fornot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                    cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x07b: /* VIS I fornot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                    break;
                case 0x07c: /* VIS I for */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                   cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
                                   cpu_fpr[DFPREG(rs1) + 1],
                                   cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x07d: /* VIS I fors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x07e: /* VIS I fone */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
                    break;
                case 0x07f: /* VIS I fones */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[rd], -1);
                    break;
                case 0x080: /* VIS I shutdown */
                case 0x081: /* VIS II siam */
                    // XXX
                    goto illegal_insn;
                default:
                    goto illegal_insn;
                }
#else
                goto ncp_insn;
#endif
            } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
                goto illegal_insn;
#else
                goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
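            /* V9 "return": compute rs1 + (rs2 or simm13) as the new nPC,
               pop a register window (restore), and check 4-byte alignment
               of the target before jumping. */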
            } else if (xop == 0x39) { /* V9 return */
                TCGv_i32 r_const;

                save_state(dc, cpu_cond);
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                gen_helper_restore();
                gen_mov_pc_npc(dc, cpu_cond);
                r_const = tcg_const_i32(3);
                gen_helper_check_align(cpu_dst, r_const);
                tcg_temp_free_i32(r_const);
                tcg_gen_mov_tl(cpu_npc, cpu_dst);
                dc->npc = DYNAMIC_PC;
                goto jmp_insn;
#endif
            } else {
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                switch (xop) {
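                /* jmpl: write the address of this instruction (dc->pc) to rd,
                   then jump to the computed target after a 4-byte alignment
                   check; npc becomes dynamic. */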
                case 0x38:      /* jmpl */
                    {
                        TCGv r_pc;
                        TCGv_i32 r_const;

                        r_pc = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_pc);
                        tcg_temp_free(r_pc);
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                    }
                    goto jmp_insn;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
                case 0x39:      /* rett, V9 return */
                    {
                        TCGv_i32 r_const;

                        if (!supervisor(dc))
                            goto priv_insn;
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                        gen_helper_rett();
                    }
                    goto jmp_insn;
#endif
                case 0x3b: /* flush */
                    if (!((dc)->def->features & CPU_FEATURE_FLUSH))
                        goto unimp_flush;
                    gen_helper_flush(cpu_dst);
                    break;
                case 0x3c:      /* save */
                    save_state(dc, cpu_cond);
                    gen_helper_save();
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3d:      /* restore */
                    save_state(dc, cpu_cond);
                    gen_helper_restore();
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
                case 0x3e:      /* V9 done/retry */
                    {
                        switch (rd) {
                        case 0:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_done();
                            goto jmp_insn;
                        case 1:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_retry();
                            goto jmp_insn;
                        default:
                            goto illegal_insn;
                        }
                    }
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            }
            break;
        }
        break;
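    /* Load/store: the effective address is rs1 + simm13 (immediate form)
       or rs1 + rs2 (register form) and is computed into cpu_addr below;
       casa/casxa use rs1 directly as the address and rs2 as the compare
       value. */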
    case 3:                     /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);

            /* flush pending conditional evaluations before exposing
               cpu state */
            if (dc->cc_op != CC_OP_FLAGS) {
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_compute_psr();
            }
            cpu_src1 = get_src1(insn, cpu_src1);
            if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_mov_tl(cpu_addr, cpu_src1);
            } else if (IS_IMM) {     /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_addr, cpu_src1);
            }
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
                (xop > 0x17 && xop <= 0x1d ) ||
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
                switch (xop) {
                case 0x0:       /* ld, V9 lduw, load unsigned word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1:       /* ldub, load unsigned byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x2:       /* lduh, load unsigned halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
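                /* ldd: rd must be even; the even register receives the
                   most significant word and rd + 1 the least significant
                   word of the 64-bit value. */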
                case 0x3:       /* ldd, load double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
                        gen_movl_TN_reg(rd + 1, cpu_tmp0);
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
                    }
                    break;
                case 0x9:       /* ldsb, load signed byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xa:       /* ldsh, load signed halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xd:       /* ldstub -- XXX: should be atomically */
                    {
                        TCGv r_const;

                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                        r_const = tcg_const_tl(0xff);
                        tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x0f:      /* swap, swap register with memory. Also
                                   atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    tcg_gen_mov_tl(cpu_val, cpu_tmp0);
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
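                /* Alternate-space loads: on 32-bit SPARC these are
                   privileged and only valid in the register form; on V9
                   the ASI comes from the instruction or the %asi register
                   (handled by gen_ld_asi). */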
                case 0x10:      /* lda, V9 lduwa, load word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
                    break;
                case 0x11:      /* lduba, load unsigned byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
                    break;
                case 0x12:      /* lduha, load unsigned halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
                    break;
                case 0x13:      /* ldda, load double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    save_state(dc, cpu_cond);
                    gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
                    goto skip_move;
                case 0x19:      /* ldsba, load signed byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
                    break;
                case 0x1a:      /* ldsha, load signed halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
                    break;
                case 0x1d:      /* ldstuba -- XXX: should be atomically */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ldstub_asi(cpu_val, cpu_addr, insn);
                    break;
                case 0x1f:      /* swapa, swap reg with alt. memory. Also
                                   atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_swap_asi(cpu_val, cpu_addr, insn);
                    break;

#ifndef TARGET_SPARC64
                case 0x30: /* ldc */
                case 0x31: /* ldcsr */
                case 0x33: /* lddc */
                    goto ncp_insn;
#endif
#endif
#ifdef TARGET_SPARC64
                case 0x08: /* V9 ldsw */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x0b: /* V9 ldx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x18: /* V9 ldswa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
                    break;
                case 0x1b: /* V9 ldxa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
                    break;
                case 0x2d: /* V9 prefetch, no effect */
                    goto skip_move;
                case 0x30: /* V9 ldfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 4, rd);
                    goto skip_move;
                case 0x33: /* V9 lddfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    goto skip_move;
                case 0x3d: /* V9 prefetcha, no effect */
                    goto skip_move;
                case 0x32: /* V9 ldqfa */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    goto skip_move;
#endif
                default:
                    goto illegal_insn;
                }
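                /* Write the loaded value back to rd; ASI/FP loads that
                   already wrote their destination jump to skip_move. */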
                gen_movl_TN_reg(rd, cpu_val);
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
            skip_move: ;
#endif
            } else if (xop >= 0x20 && xop < 0x24) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x20:      /* ldf, load fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
                    break;
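                /* rd selects the FSR access size: rd == 1 is the 64-bit V9
                   ldxfsr, anything else the 32-bit ldfsr. */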
                case 0x21:      /* ldfsr, V9 ldxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    if (rd == 1) {
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        gen_helper_ldxfsr(cpu_tmp64);
                    } else
#endif
                    {
                        tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                        gen_helper_ldfsr(cpu_tmp32);
                    }
                    break;
                case 0x22:      /* ldqf, load quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_ldqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_QT0_fpr(QFPREG(rd));
                    }
                    break;
                case 0x23:      /* lddf, load double fpreg */
                    {
                        TCGv_i32 r_const;

                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_lddf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_DT0_fpr(DFPREG(rd));
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
                       xop == 0xe || xop == 0x1e) {
                gen_movl_reg_TN(rd, cpu_val);
                switch (xop) {
                case 0x4: /* st, store word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x5: /* stb, store byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x6: /* sth, store halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
                    break;
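                /* std: rd must be even; rd supplies the most significant
                   word and rd + 1 the least significant word of the 64-bit
                   store. */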
                case 0x7: /* std, store double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        gen_address_mask(dc, cpu_addr);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_movl_reg_TN(rd + 1, cpu_tmp0);
                        tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    }
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x14: /* sta, V9 stwa, store word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 4);
                    break;
                case 0x15: /* stba, store byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 1);
                    break;
                case 0x16: /* stha, store halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 2);
                    break;
                case 0x17: /* stda, store double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        save_state(dc, cpu_cond);
                        gen_stda_asi(cpu_val, cpu_addr, insn, rd);
                    }
                    break;
#endif
#ifdef TARGET_SPARC64
                case 0x0e: /* V9 stx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1e: /* V9 stxa */
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 8);
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x23 && xop < 0x28) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x24: /* stf, store fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
                    tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
                    break;
                case 0x25: /* stfsr, V9 stxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
                    if (rd == 1)
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    else
                        tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
#else
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
                    tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
#endif
                    break;
                case 0x26:
#ifdef TARGET_SPARC64
                    /* V9 stqf, store quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_stqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
#else /* !TARGET_SPARC64 */
                    /* stdfq, store floating point queue */
#if defined(CONFIG_USER_ONLY)
                    goto illegal_insn;
#else
                    if (!supervisor(dc))
                        goto priv_insn;
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    goto nfq_insn;
#endif
#endif
                case 0x27: /* stdf, store double fpreg */
                    {
                        TCGv_i32 r_const;

                        gen_op_load_fpr_DT0(DFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_stdf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x33 && xop < 0x3f) {
                save_state(dc, cpu_cond);
                switch (xop) {
#ifdef TARGET_SPARC64
                case 0x34: /* V9 stfa */
                    gen_stf_asi(cpu_addr, insn, 4, rd);
                    break;
                case 0x36: /* V9 stqfa */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    }
                    break;
                case 0x37: /* V9 stdfa */
                    gen_op_load_fpr_DT0(DFPREG(rd));
                    gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    break;
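                /* casa/casxa: compare-and-swap at cpu_addr with rs2 as the
                   comparison value; the old memory value is returned in rd. */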
                case 0x3c: /* V9 casa */
                    gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
                case 0x3e: /* V9 casxa */
                    gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
#else
                case 0x34: /* stc */
                case 0x35: /* stcsr */
                case 0x36: /* stdcq */
                case 0x37: /* stdc */
                    goto ncp_insn;
#endif
                default:
                    goto illegal_insn;
                }
            } else
                goto illegal_insn;
        }
        break;
    }
    /* default case for non jump instructions */
    if (dc->npc == DYNAMIC_PC) {
        dc->pc = DYNAMIC_PC;
        gen_op_next_insn();
    } else if (dc->npc == JUMP_PC) {
        /* we can do a static jump */
        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
 jmp_insn:
    goto egress;
 illegal_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_ILL_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
 unimp_flush:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_UNIMP_FLUSH);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#if !defined(CONFIG_USER_ONLY)
 priv_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_PRIV_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 nfpu_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
    dc->is_br = 1;
    goto egress;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
 nfq_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
    dc->is_br = 1;
    goto egress;
#endif
#ifndef TARGET_SPARC64
 ncp_insn:
    {
        TCGv r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_NCP_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 egress:
    tcg_temp_free(cpu_tmp1);
    tcg_temp_free(cpu_tmp2);
}

static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    if ((dc->def->features & CPU_FEATURE_FLOAT))
        dc->fpu_enabled = cpu_fpu_enabled(env);
    else
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    dc->address_mask_32bit = env->pstate & PS_AM;
#endif
    dc->singlestep = (env->singlestep_enabled || singlestep);
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    cpu_dst = tcg_temp_local_new();

    // loads and stores
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
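    /* Translate one guest instruction per iteration until we hit a branch,
       a non-sequential PC, a page boundary, single-step mode, or one of the
       TB size/instruction-count limits. */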
    do {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug();
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
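        /* When searching for a PC (spc != 0), record the guest pc/npc and
           instruction count for each generated op so the exception code can
           map host state back to a guest PC. */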
        if (spc) {
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (dc->singlestep) {
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
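    /* The block ended without an explicit branch: either chain directly to
       the next TB (static pc/npc) or store the PC state and exit. */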
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        log_page_dump();
#endif
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}

void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}

void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}

void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34
        "f32", "f33", "f34