tcg/sparc/tcg-target.c @ b3db8758

/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "%g0",
    "%g1",
    "%g2",
    "%g3",
    "%g4",
    "%g5",
    "%g6",
    "%g7",
    "%o0",
    "%o1",
    "%o2",
    "%o3",
    "%o4",
    "%o5",
    "%o6",
    "%o7",
    "%l0",
    "%l1",
    "%l2",
    "%l3",
    "%l4",
    "%l5",
    "%l6",
    "%l7",
    "%i0",
    "%i1",
    "%i2",
    "%i3",
    "%i4",
    "%i5",
    "%i6",
    "%i7",
};

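/* Preferred order for allocating TCG values to host registers.  The %l
   and %i registers come first: the SPARC register window mechanism
   preserves them across calls made from generated code. */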
static const int tcg_target_reg_alloc_order[TCG_TARGET_NB_REGS] = {
    TCG_REG_L0,
    TCG_REG_L1,
    TCG_REG_L2,
    TCG_REG_L3,
    TCG_REG_L4,
    TCG_REG_L5,
    TCG_REG_L6,
    TCG_REG_L7,
    TCG_REG_I0,
    TCG_REG_I1,
    TCG_REG_I2,
    TCG_REG_I3,
    TCG_REG_I4,
};

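/* SPARC calling convention: the first six integer arguments are passed
   in %o0-%o5; results come back in %o0, with %o0/%o1 used as a pair for
   a 64-bit result on a 32-bit host. */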
static const int tcg_target_call_iarg_regs[6] = {
    TCG_REG_O0,
    TCG_REG_O1,
    TCG_REG_O2,
    TCG_REG_O3,
    TCG_REG_O4,
    TCG_REG_O5,
};

static const int tcg_target_call_oarg_regs[2] = {
    TCG_REG_O0,
    TCG_REG_O1,
};

static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value)
{
    switch (type) {
    case R_SPARC_32:
        if (value != (uint32_t)value)
            tcg_abort();
        *(uint32_t *)code_ptr = value;
        break;
    default:
        tcg_abort();
    }
}

/* maximum number of registers used for input function arguments */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
{
    return 6;
}

/* parse target specific constraints */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch (ct_str[0]) {
    case 'r':
    case 'L': /* qemu_ld/st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        break;
    case 'I':
        ct->ct |= TCG_CT_CONST_S11;
        break;
    case 'J':
        ct->ct |= TCG_CT_CONST_S13;
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}

#define ABS(x) ((x) < 0? -(x) : (x))
/* test if a constant matches the constraint */
static inline int tcg_target_const_match(tcg_target_long val,
                                         const TCGArgConstraint *arg_ct)
{
    int ct;

    ct = arg_ct->ct;
    if (ct & TCG_CT_CONST)
        return 1;
    else if ((ct & TCG_CT_CONST_S11) && ABS(val) == (ABS(val) & 0x3ff))
        return 1;
    else if ((ct & TCG_CT_CONST_S13) && ABS(val) == (ABS(val) & 0xfff))
        return 1;
    else
        return 0;
}

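/* Fields of the 32-bit SPARC instruction word: op in bits 31..30, rd in
   29..25, op3 in 24..19, rs1 in 18..14, the i bit in 13 selecting either
   the simm13 immediate in 12..0 or rs2 in 4..0.  Format 2 instructions
   (sethi and branches) use op2 in bits 24..22 and a 22-bit
   immediate/displacement in 21..0. */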
#define INSN_OP(x)  ((x) << 30)
#define INSN_OP2(x) ((x) << 22)
#define INSN_OP3(x) ((x) << 19)
#define INSN_OPF(x) ((x) << 5)
#define INSN_RD(x)  ((x) << 25)
#define INSN_RS1(x) ((x) << 14)
#define INSN_RS2(x) (x)

#define INSN_IMM13(x) ((1 << 13) | ((x) & 0x1fff))
#define INSN_OFF22(x) (((x) >> 2) & 0x3fffff)

#define INSN_COND(x, a) (((x) << 25) | ((a) << 29))
#define COND_A     0x8
#define BA         (INSN_OP(0) | INSN_COND(COND_A, 0) | INSN_OP2(0x2))

#define ARITH_ADD  (INSN_OP(2) | INSN_OP3(0x00))
#define ARITH_AND  (INSN_OP(2) | INSN_OP3(0x01))
#define ARITH_OR   (INSN_OP(2) | INSN_OP3(0x02))
#define ARITH_XOR  (INSN_OP(2) | INSN_OP3(0x03))
#define ARITH_SUB  (INSN_OP(2) | INSN_OP3(0x08))
#define ARITH_ADDX (INSN_OP(2) | INSN_OP3(0x10))
#define ARITH_SUBX (INSN_OP(2) | INSN_OP3(0x0c))
#define ARITH_UMUL (INSN_OP(2) | INSN_OP3(0x0a))
#define ARITH_UDIV (INSN_OP(2) | INSN_OP3(0x0e))
#define ARITH_SDIV (INSN_OP(2) | INSN_OP3(0x0f))
#define ARITH_MULX (INSN_OP(2) | INSN_OP3(0x09))
#define ARITH_UDIVX (INSN_OP(2) | INSN_OP3(0x0d))
#define ARITH_SDIVX (INSN_OP(2) | INSN_OP3(0x2d))

#define SHIFT_SLL  (INSN_OP(2) | INSN_OP3(0x25))
#define SHIFT_SRL  (INSN_OP(2) | INSN_OP3(0x26))
#define SHIFT_SRA  (INSN_OP(2) | INSN_OP3(0x27))

#define SHIFT_SLLX (INSN_OP(2) | INSN_OP3(0x25) | (1 << 12))
#define SHIFT_SRLX (INSN_OP(2) | INSN_OP3(0x26) | (1 << 12))
#define SHIFT_SRAX (INSN_OP(2) | INSN_OP3(0x27) | (1 << 12))

#define WRY        (INSN_OP(2) | INSN_OP3(0x30))
#define JMPL       (INSN_OP(2) | INSN_OP3(0x38))
#define SAVE       (INSN_OP(2) | INSN_OP3(0x3c))
#define RESTORE    (INSN_OP(2) | INSN_OP3(0x3d))
#define SETHI      (INSN_OP(0) | INSN_OP2(0x4))
#define CALL       INSN_OP(1)
#define LDUB       (INSN_OP(3) | INSN_OP3(0x01))
#define LDSB       (INSN_OP(3) | INSN_OP3(0x09))
#define LDUH       (INSN_OP(3) | INSN_OP3(0x02))
#define LDSH       (INSN_OP(3) | INSN_OP3(0x0a))
#define LDUW       (INSN_OP(3) | INSN_OP3(0x00))
#define LDSW       (INSN_OP(3) | INSN_OP3(0x08))
#define LDX        (INSN_OP(3) | INSN_OP3(0x0b))
#define STB        (INSN_OP(3) | INSN_OP3(0x05))
#define STH        (INSN_OP(3) | INSN_OP3(0x06))
#define STW        (INSN_OP(3) | INSN_OP3(0x04))
#define STX        (INSN_OP(3) | INSN_OP3(0x0e))

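/* Register-to-register move, encoded as "or arg, %g0, ret"
   (%g0 always reads as zero). */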
static inline void tcg_out_mov(TCGContext *s, int ret, int arg)
{
    tcg_out32(s, ARITH_OR | INSN_RD(ret) | INSN_RS1(arg) |
              INSN_RS2(TCG_REG_G0));
}

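/* Load a constant into ret: a single "or %g0, imm, ret" when the value
   fits in the low 12 bits, otherwise the classic sethi/or pair (sethi
   sets the upper 22 bits, the or fills in the low 10 bits). */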
static inline void tcg_out_movi(TCGContext *s, TCGType type,
                                int ret, tcg_target_long arg)
{
#if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
    if (arg != (arg & 0xffffffff))
        fprintf(stderr, "unimplemented %s with constant %ld\n", __func__, arg);
#endif
    if (arg == (arg & 0xfff))
        tcg_out32(s, ARITH_OR | INSN_RD(ret) | INSN_RS1(TCG_REG_G0) |
                  INSN_IMM13(arg));
    else {
        tcg_out32(s, SETHI | INSN_RD(ret) | ((arg & 0xfffffc00) >> 10));
        if (arg & 0x3ff)
            tcg_out32(s, ARITH_OR | INSN_RD(ret) | INSN_RS1(ret) |
                      INSN_IMM13(arg & 0x3ff));
    }
}

static inline void tcg_out_ld_raw(TCGContext *s, int ret,
                                  tcg_target_long arg)
{
    tcg_out32(s, SETHI | INSN_RD(ret) | (((uint32_t)arg & 0xfffffc00) >> 10));
    tcg_out32(s, LDUW | INSN_RD(ret) | INSN_RS1(ret) |
              INSN_IMM13(arg & 0x3ff));
}

static inline void tcg_out_ld_ptr(TCGContext *s, int ret,
                                  tcg_target_long arg)
{
#if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
    if (arg != (arg & 0xffffffff))
        fprintf(stderr, "unimplemented %s with offset %ld\n", __func__, arg);
    if (arg != (arg & 0xfff))
        tcg_out32(s, SETHI | INSN_RD(ret) | (((uint32_t)arg & 0xfffffc00) >> 10));
    tcg_out32(s, LDX | INSN_RD(ret) | INSN_RS1(ret) |
              INSN_IMM13(arg & 0x3ff));
#else
    tcg_out_ld_raw(s, ret, arg);
#endif
}

static inline void tcg_out_ldst(TCGContext *s, int ret, int addr, int offset, int op)
{
    if (offset == (offset & 0xfff))
        tcg_out32(s, op | INSN_RD(ret) | INSN_RS1(addr) |
                  INSN_IMM13(offset));
    else
        fprintf(stderr, "unimplemented %s with offset %d\n", __func__, offset);
}

static inline void tcg_out_ld(TCGContext *s, int ret,
                              int arg1, tcg_target_long arg2)
{
    fprintf(stderr, "unimplemented %s\n", __func__);
}

static inline void tcg_out_st(TCGContext *s, int arg,
                              int arg1, tcg_target_long arg2)
{
    fprintf(stderr, "unimplemented %s\n", __func__);
}

static inline void tcg_out_arith(TCGContext *s, int rd, int rs1, int rs2,
                                 int op)
{
    tcg_out32(s, op | INSN_RD(rd) | INSN_RS1(rs1) |
              INSN_RS2(rs2));
}

static inline void tcg_out_arithi(TCGContext *s, int rd, int rs1, int offset,
                                  int op)
{
    tcg_out32(s, op | INSN_RD(rd) | INSN_RS1(rs1) |
              INSN_IMM13(offset));
}

static inline void tcg_out_sety(TCGContext *s, tcg_target_long val)
{
    if (val == 0 || val == -1)
        tcg_out32(s, WRY | INSN_IMM13(val));
    else
        fprintf(stderr, "unimplemented sety %ld\n", (long)val);
}

static inline void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
{
    if (val != 0) {
        if (val == (val & 0xfff))
            tcg_out_arithi(s, reg, reg, val, ARITH_ADD);
        else
            fprintf(stderr, "unimplemented addi %ld\n", (long)val);
    }
}

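/* The canonical SPARC nop is "sethi 0, %g0". */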
static inline void tcg_out_nop(TCGContext *s)
{
    tcg_out32(s, SETHI | INSN_RD(TCG_REG_G0) | 0);
}

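/* Prologue: "save %sp, -TCG_TARGET_STACK_MINFRAME, %sp" switches to a
   fresh register window and reserves the minimal stack frame. */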
static inline void tcg_target_prologue(TCGContext *s)
{
    tcg_out32(s, SAVE | INSN_RD(TCG_REG_O6) | INSN_RS1(TCG_REG_O6) |
              INSN_IMM13(-TCG_TARGET_STACK_MINFRAME));
}

static inline void tcg_out_op(TCGContext *s, int opc, const TCGArg *args,
                              const int *const_args)
{
    int c;

    switch (opc) {
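    /* exit_tb: place the return value (args[0]) in %i0 and leave via the
       standard "jmpl %i7+8, %g0; restore" return sequence; the restore in
       the delay slot pops the register window, so the value ends up in
       the caller's %o0. */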
    case INDEX_op_exit_tb:
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I0, args[0]);
        tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I7) |
                  INSN_IMM13(8));
        tcg_out32(s, RESTORE | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_G0) |
                      INSN_RS2(TCG_REG_G0));
        break;
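    /* goto_tb, direct jump method: if the target is within reach of a
       22-bit branch displacement, emit a "ba"; otherwise, and for the
       indirect method, load the target address into the reserved %i5 and
       "jmpl" through it.  tcg_out_nop() below fills the delay slot. */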
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* direct jump method */
            if (ABS(args[0] - (unsigned long)s->code_ptr) ==
                (ABS(args[0] - (unsigned long)s->code_ptr) & 0x1fffff)) {
                tcg_out32(s, BA |
                          INSN_OFF22(args[0] - (unsigned long)s->code_ptr));
            } else {
                tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I5, args[0]);
                tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I5) |
                          INSN_RS2(TCG_REG_G0));
            }
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
        } else {
            /* indirect jump method */
            tcg_out_ld_ptr(s, TCG_REG_I5, (tcg_target_long)(s->tb_next + args[0]));
            tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I5) |
                      INSN_RS2(TCG_REG_G0));
        }
        tcg_out_nop(s);
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
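    /* call: a direct call uses the CALL instruction's 30-bit word
       displacement; an indirect call loads the target into %o7 and uses
       "jmpl", so the return address lands in %o7 either way.  A nop
       fills the delay slot. */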
    case INDEX_op_call:
        if (const_args[0]) {
            tcg_out32(s, CALL | ((((tcg_target_ulong)args[0]
                                  - (tcg_target_ulong)s->code_ptr) >> 2)
                                 & 0x3fffffff));
            tcg_out_nop(s);
        } else {
            tcg_out_ld_ptr(s, TCG_REG_O7, (tcg_target_long)(s->tb_next + args[0]));
            tcg_out32(s, JMPL | INSN_RD(TCG_REG_O7) | INSN_RS1(TCG_REG_O7) |
                      INSN_RS2(TCG_REG_G0));
            tcg_out_nop(s);
        }
        break;
    case INDEX_op_jmp:
        fprintf(stderr, "unimplemented jmp\n");
        break;
    case INDEX_op_br:
        fprintf(stderr, "unimplemented br\n");
        break;
    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], (uint32_t)args[1]);
        break;

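/* OP_32_64(op) expands to the _i32 case label and, on a 64-bit host, to
   the matching _i64 label as well, so one arm handles both widths. */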
#if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
#define OP_32_64(x)                             \
        glue(glue(case INDEX_op_, x), _i32:)    \
        glue(glue(case INDEX_op_, x), _i64:)
#else
#define OP_32_64(x)                             \
        glue(glue(case INDEX_op_, x), _i32:)
#endif
        OP_32_64(ld8u);
        tcg_out_ldst(s, args[0], args[1], args[2], LDUB);
        break;
        OP_32_64(ld8s);
        tcg_out_ldst(s, args[0], args[1], args[2], LDSB);
        break;
        OP_32_64(ld16u);
        tcg_out_ldst(s, args[0], args[1], args[2], LDUH);
        break;
        OP_32_64(ld16s);
        tcg_out_ldst(s, args[0], args[1], args[2], LDSH);
        break;
    case INDEX_op_ld_i32:
#if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
    case INDEX_op_ld32u_i64:
#endif
        tcg_out_ldst(s, args[0], args[1], args[2], LDUW);
        break;
        OP_32_64(st8);
        tcg_out_ldst(s, args[0], args[1], args[2], STB);
        break;
        OP_32_64(st16);
        tcg_out_ldst(s, args[0], args[1], args[2], STH);
        break;
    case INDEX_op_st_i32:
#if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
    case INDEX_op_st32_i64:
#endif
        tcg_out_ldst(s, args[0], args[1], args[2], STW);
        break;
        OP_32_64(add);
        c = ARITH_ADD;
        goto gen_arith32;
        OP_32_64(sub);
        c = ARITH_SUB;
        goto gen_arith32;
        OP_32_64(and);
        c = ARITH_AND;
        goto gen_arith32;
        OP_32_64(or);
        c = ARITH_OR;
        goto gen_arith32;
        OP_32_64(xor);
        c = ARITH_XOR;
        goto gen_arith32;
    case INDEX_op_shl_i32:
        c = SHIFT_SLL;
        goto gen_arith32;
    case INDEX_op_shr_i32:
        c = SHIFT_SRL;
        goto gen_arith32;
    case INDEX_op_sar_i32:
        c = SHIFT_SRA;
        goto gen_arith32;
    case INDEX_op_mul_i32:
        c = ARITH_UMUL;
        goto gen_arith32;
    case INDEX_op_div2_i32:
#if defined(__sparc_v9__) || defined(__sparc_v8plus__)
        c = ARITH_SDIVX;
        goto gen_arith32;
#else
        tcg_out_sety(s, 0);
        c = ARITH_SDIV;
        goto gen_arith32;
#endif
    case INDEX_op_divu2_i32:
#if defined(__sparc_v9__) || defined(__sparc_v8plus__)
        c = ARITH_UDIVX;
        goto gen_arith32;
#else
        tcg_out_sety(s, 0);
        c = ARITH_UDIV;
        goto gen_arith32;
#endif

    case INDEX_op_brcond_i32:
        fprintf(stderr, "unimplemented brcond\n");
        break;

    case INDEX_op_qemu_ld8u:
        fprintf(stderr, "unimplemented qld\n");
        break;
    case INDEX_op_qemu_ld8s:
        fprintf(stderr, "unimplemented qld\n");
        break;
    case INDEX_op_qemu_ld16u:
        fprintf(stderr, "unimplemented qld\n");
        break;
    case INDEX_op_qemu_ld16s:
        fprintf(stderr, "unimplemented qld\n");
        break;
    case INDEX_op_qemu_ld32u:
        fprintf(stderr, "unimplemented qld\n");
        break;
    case INDEX_op_qemu_ld32s:
        fprintf(stderr, "unimplemented qld\n");
        break;
    case INDEX_op_qemu_st8:
        fprintf(stderr, "unimplemented qst\n");
        break;
    case INDEX_op_qemu_st16:
        fprintf(stderr, "unimplemented qst\n");
        break;
    case INDEX_op_qemu_st32:
        fprintf(stderr, "unimplemented qst\n");
        break;

#if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
    case INDEX_op_movi_i64:
        tcg_out_movi(s, TCG_TYPE_I64, args[0], args[1]);
        break;
    case INDEX_op_ld32s_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], LDSW);
        break;
    case INDEX_op_ld_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], LDX);
        break;
    case INDEX_op_st_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], STX);
        break;
    case INDEX_op_shl_i64:
        c = SHIFT_SLLX;
        goto gen_arith32;
    case INDEX_op_shr_i64:
        c = SHIFT_SRLX;
        goto gen_arith32;
    case INDEX_op_sar_i64:
        c = SHIFT_SRAX;
        goto gen_arith32;
    case INDEX_op_mul_i64:
        c = ARITH_MULX;
        goto gen_arith32;
    case INDEX_op_div2_i64:
        c = ARITH_SDIVX;
        goto gen_arith32;
    case INDEX_op_divu2_i64:
        c = ARITH_UDIVX;
        goto gen_arith32;

    case INDEX_op_brcond_i64:
        fprintf(stderr, "unimplemented brcond\n");
        break;
    case INDEX_op_qemu_ld64:
        fprintf(stderr, "unimplemented qld\n");
        break;
    case INDEX_op_qemu_st64:
        fprintf(stderr, "unimplemented qst\n");
        break;

#endif
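    /* Common tail for the arithmetic and shift cases above: emit the
       opcode selected in 'c', using the immediate form when operand 2 is
       a constant. */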
    gen_arith32:
        if (const_args[2]) {
            tcg_out_arithi(s, args[0], args[1], args[2], c);
        } else {
            tcg_out_arith(s, args[0], args[1], args[2], c);
        }
        break;

    default:
        fprintf(stderr, "unknown opcode 0x%x\n", opc);
        tcg_abort();
    }
}

static const TCGTargetOpDef sparc_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "ri" } },
    { INDEX_op_jmp, { "ri" } },
    { INDEX_op_br, { } },

    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "r", "r" } },
    { INDEX_op_st16_i32, { "r", "r" } },
    { INDEX_op_st_i32, { "r", "r" } },

    { INDEX_op_add_i32, { "r", "r", "rJ" } },
    { INDEX_op_mul_i32, { "r", "r", "rJ" } },
    { INDEX_op_div2_i32, { "r", "r", "0", "1", "r" } },
    { INDEX_op_divu2_i32, { "r", "r", "0", "1", "r" } },
    { INDEX_op_sub_i32, { "r", "r", "rJ" } },
    { INDEX_op_and_i32, { "r", "r", "rJ" } },
    { INDEX_op_or_i32, { "r", "r", "rJ" } },
    { INDEX_op_xor_i32, { "r", "r", "rJ" } },

    { INDEX_op_shl_i32, { "r", "r", "rJ" } },
    { INDEX_op_shr_i32, { "r", "r", "rJ" } },
    { INDEX_op_sar_i32, { "r", "r", "rJ" } },

    { INDEX_op_brcond_i32, { "r", "ri" } },

    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32u, { "r", "L" } },
    { INDEX_op_qemu_ld32s, { "r", "L" } },

    { INDEX_op_qemu_st8, { "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L" } },

#if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
    { INDEX_op_mov_i64, { "r", "r" } },
    { INDEX_op_movi_i64, { "r" } },
    { INDEX_op_ld8u_i64, { "r", "r" } },
    { INDEX_op_ld8s_i64, { "r", "r" } },
    { INDEX_op_ld16u_i64, { "r", "r" } },
    { INDEX_op_ld16s_i64, { "r", "r" } },
    { INDEX_op_ld32u_i64, { "r", "r" } },
    { INDEX_op_ld32s_i64, { "r", "r" } },
    { INDEX_op_ld_i64, { "r", "r" } },
    { INDEX_op_st8_i64, { "r", "r" } },
    { INDEX_op_st16_i64, { "r", "r" } },
    { INDEX_op_st32_i64, { "r", "r" } },
    { INDEX_op_st_i64, { "r", "r" } },

    { INDEX_op_add_i64, { "r", "r", "rJ" } },
    { INDEX_op_mul_i64, { "r", "r", "rJ" } },
    { INDEX_op_div2_i64, { "r", "r", "0", "1", "r" } },
    { INDEX_op_divu2_i64, { "r", "r", "0", "1", "r" } },
    { INDEX_op_sub_i64, { "r", "r", "rJ" } },
    { INDEX_op_and_i64, { "r", "r", "rJ" } },
    { INDEX_op_or_i64, { "r", "r", "rJ" } },
    { INDEX_op_xor_i64, { "r", "r", "rJ" } },

    { INDEX_op_shl_i64, { "r", "r", "rJ" } },
    { INDEX_op_shr_i64, { "r", "r", "rJ" } },
    { INDEX_op_sar_i64, { "r", "r", "rJ" } },

    { INDEX_op_brcond_i64, { "r", "ri" } },
#endif
    { -1 },
};

void tcg_target_init(TCGContext *s)
{
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffffffff);
#if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0, 0xffffffff);
#endif
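    /* Registers not preserved across a function call: the %g registers
       and the %o registers (a callee's "save" turns the caller's %o
       registers into its own %i registers). */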
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,
                     (1 << TCG_REG_G1) |
                     (1 << TCG_REG_G2) |
                     (1 << TCG_REG_G3) |
                     (1 << TCG_REG_G4) |
                     (1 << TCG_REG_G5) |
                     (1 << TCG_REG_G6) |
                     (1 << TCG_REG_G7) |
                     (1 << TCG_REG_O0) |
                     (1 << TCG_REG_O1) |
                     (1 << TCG_REG_O2) |
                     (1 << TCG_REG_O3) |
                     (1 << TCG_REG_O4) |
                     (1 << TCG_REG_O5) |
                     (1 << TCG_REG_O7));

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_G0);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_I5); // for internal use
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_I6);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_I7);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_O6);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_O7);
    tcg_add_target_add_op_defs(sparc_op_defs);
}