/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#ifndef NDEBUG
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "%g0",
    "%g1",
    "%g2",
    "%g3",
    "%g4",
    "%g5",
    "%g6",
    "%g7",
    "%o0",
    "%o1",
    "%o2",
    "%o3",
    "%o4",
    "%o5",
    "%o6",
    "%o7",
    "%l0",
    "%l1",
    "%l2",
    "%l3",
    "%l4",
    "%l5",
    "%l6",
    "%l7",
    "%i0",
    "%i1",
    "%i2",
    "%i3",
    "%i4",
    "%i5",
    "%i6",
    "%i7",
};
#endif

#ifdef CONFIG_USE_GUEST_BASE
# define TCG_GUEST_BASE_REG TCG_REG_I3
#else
# define TCG_GUEST_BASE_REG TCG_REG_G0
#endif

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_L0,
    TCG_REG_L1,
    TCG_REG_L2,
    TCG_REG_L3,
    TCG_REG_L4,
    TCG_REG_L5,
    TCG_REG_L6,
    TCG_REG_L7,
    TCG_REG_I0,
    TCG_REG_I1,
    TCG_REG_I2,
    TCG_REG_I3,
    TCG_REG_I4,
};

static const int tcg_target_call_iarg_regs[6] = {
    TCG_REG_O0,
    TCG_REG_O1,
    TCG_REG_O2,
    TCG_REG_O3,
    TCG_REG_O4,
    TCG_REG_O5,
};

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_O0,
    TCG_REG_O1,
    TCG_REG_O2,
    TCG_REG_O3,
};

static inline int check_fit_tl(tcg_target_long val, unsigned int bits)
{
    return (val << ((sizeof(tcg_target_long) * 8 - bits))
            >> (sizeof(tcg_target_long) * 8 - bits)) == val;
}

static inline int check_fit_i32(uint32_t val, unsigned int bits)
{
    return ((val << (32 - bits)) >> (32 - bits)) == val;
}
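
/* Illustrative note (not in the original source): check_fit_tl asks
   whether VAL survives a round trip through a BITS-wide signed field.
   Shifting left and back right by (width - bits) sign-extends from
   bit (bits - 1); the value fits iff nothing was lost.  With bits = 13,
   the SPARC simm13 field filled in by INSN_IMM13 below:
       check_fit_tl(4095, 13)  == 1    max positive simm13
       check_fit_tl(4096, 13)  == 0
       check_fit_tl(-4096, 13) == 1    min negative simm13
       check_fit_tl(-4097, 13) == 0                                  */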

static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    value += addend;
    switch (type) {
    case R_SPARC_32:
        if (value != (uint32_t)value)
            tcg_abort();
        *(uint32_t *)code_ptr = value;
        break;
    case R_SPARC_WDISP22:
        value -= (long)code_ptr;
        value >>= 2;
        if (!check_fit_tl(value, 22))
            tcg_abort();
        *(uint32_t *)code_ptr = ((*(uint32_t *)code_ptr) & ~0x3fffff) | value;
        break;
    case R_SPARC_WDISP19:
        value -= (long)code_ptr;
        value >>= 2;
        if (!check_fit_tl(value, 19))
            tcg_abort();
        *(uint32_t *)code_ptr = ((*(uint32_t *)code_ptr) & ~0x7ffff) | value;
        break;
    default:
        tcg_abort();
    }
}
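
/* Illustrative note (not in the original source): for the branch
   relocations the stored field is the word displacement
   (target - insn_address) >> 2, so R_SPARC_WDISP22 gives the Bicc
   form a reach of +/- 2^23 bytes (8 MB) and R_SPARC_WDISP19 gives
   the BPcc form +/- 2^20 bytes (1 MB); anything further aborts.     */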

/* maximum number of registers used for input function arguments */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
{
    return 6;
}

/* parse target specific constraints */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch (ct_str[0]) {
    case 'r':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        break;
    case 'L': /* qemu_ld/st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        // Helper args
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_O0);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_O1);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_O2);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_O3);
        break;
    case 'I':
        ct->ct |= TCG_CT_CONST_S11;
        break;
    case 'J':
        ct->ct |= TCG_CT_CONST_S13;
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}

/* test if a constant matches the constraint */
static inline int tcg_target_const_match(tcg_target_long val,
                                         const TCGArgConstraint *arg_ct)
{
    int ct;

    ct = arg_ct->ct;
    if (ct & TCG_CT_CONST)
        return 1;
    else if ((ct & TCG_CT_CONST_S11) && check_fit_tl(val, 11))
        return 1;
    else if ((ct & TCG_CT_CONST_S13) && check_fit_tl(val, 13))
        return 1;
    else
        return 0;
}
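
/* Illustrative summary (not in the original source) of the constraint
   letters accepted above:
     'r'  any of the 32 integer registers
     'L'  like 'r', minus %o0-%o3, which the qemu_ld/st slow path
          clobbers as helper-call arguments
     'I'  signed 11-bit immediate (fits the movcc simm11 field)
     'J'  signed 13-bit immediate (fits the simm13 field)            */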

#define INSN_OP(x)  ((x) << 30)
#define INSN_OP2(x) ((x) << 22)
#define INSN_OP3(x) ((x) << 19)
#define INSN_OPF(x) ((x) << 5)
#define INSN_RD(x)  ((x) << 25)
#define INSN_RS1(x) ((x) << 14)
#define INSN_RS2(x) (x)
#define INSN_ASI(x) ((x) << 5)

#define INSN_IMM11(x) ((1 << 13) | ((x) & 0x7ff))
#define INSN_IMM13(x) ((1 << 13) | ((x) & 0x1fff))
#define INSN_OFF19(x) (((x) >> 2) & 0x07ffff)
#define INSN_OFF22(x) (((x) >> 2) & 0x3fffff)
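
/* Illustrative example (not in the original source): a format-3
   instruction word is assembled by OR-ing the fields above.  The
   "mov" idiom emitted by tcg_out_mov, or %g0, %o1, %o2, is
       ARITH_OR | INSN_RD(10) | INSN_RS1(0) | INSN_RS2(9)
   i.e. op = 2 in bits 31:30, rd = %o2 in 29:25, op3 = 0x02 in 24:19,
   rs1 = %g0 in 18:14, i = 0, rs2 = %o1 in 4:0.  INSN_IMM13 instead
   sets the i bit (bit 13) plus a signed 13-bit immediate.           */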

#define INSN_COND(x, a) (((x) << 25) | ((a) << 29))
#define COND_N     0x0
#define COND_E     0x1
#define COND_LE    0x2
#define COND_L     0x3
#define COND_LEU   0x4
#define COND_CS    0x5
#define COND_NEG   0x6
#define COND_VS    0x7
#define COND_A     0x8
#define COND_NE    0x9
#define COND_G     0xa
#define COND_GE    0xb
#define COND_GU    0xc
#define COND_CC    0xd
#define COND_POS   0xe
#define COND_VC    0xf
#define BA         (INSN_OP(0) | INSN_COND(COND_A, 0) | INSN_OP2(0x2))

#define MOVCC_ICC  (1 << 18)
#define MOVCC_XCC  (1 << 18 | 1 << 12)

#define ARITH_ADD  (INSN_OP(2) | INSN_OP3(0x00))
#define ARITH_ADDCC (INSN_OP(2) | INSN_OP3(0x10))
#define ARITH_AND  (INSN_OP(2) | INSN_OP3(0x01))
#define ARITH_ANDN (INSN_OP(2) | INSN_OP3(0x05))
#define ARITH_OR   (INSN_OP(2) | INSN_OP3(0x02))
#define ARITH_ORCC (INSN_OP(2) | INSN_OP3(0x12))
#define ARITH_ORN  (INSN_OP(2) | INSN_OP3(0x06))
#define ARITH_XOR  (INSN_OP(2) | INSN_OP3(0x03))
#define ARITH_SUB  (INSN_OP(2) | INSN_OP3(0x04))
#define ARITH_SUBCC (INSN_OP(2) | INSN_OP3(0x14))
#define ARITH_ADDX (INSN_OP(2) | INSN_OP3(0x08))
#define ARITH_SUBX (INSN_OP(2) | INSN_OP3(0x0c))
#define ARITH_UMUL (INSN_OP(2) | INSN_OP3(0x0a))
#define ARITH_UDIV (INSN_OP(2) | INSN_OP3(0x0e))
#define ARITH_SDIV (INSN_OP(2) | INSN_OP3(0x0f))
#define ARITH_MULX (INSN_OP(2) | INSN_OP3(0x09))
#define ARITH_UDIVX (INSN_OP(2) | INSN_OP3(0x0d))
#define ARITH_SDIVX (INSN_OP(2) | INSN_OP3(0x2d))
#define ARITH_MOVCC (INSN_OP(2) | INSN_OP3(0x2c))

#define SHIFT_SLL  (INSN_OP(2) | INSN_OP3(0x25))
#define SHIFT_SRL  (INSN_OP(2) | INSN_OP3(0x26))
#define SHIFT_SRA  (INSN_OP(2) | INSN_OP3(0x27))

#define SHIFT_SLLX (INSN_OP(2) | INSN_OP3(0x25) | (1 << 12))
#define SHIFT_SRLX (INSN_OP(2) | INSN_OP3(0x26) | (1 << 12))
#define SHIFT_SRAX (INSN_OP(2) | INSN_OP3(0x27) | (1 << 12))

#define RDY        (INSN_OP(2) | INSN_OP3(0x28) | INSN_RS1(0))
#define WRY        (INSN_OP(2) | INSN_OP3(0x30) | INSN_RD(0))
#define JMPL       (INSN_OP(2) | INSN_OP3(0x38))
#define SAVE       (INSN_OP(2) | INSN_OP3(0x3c))
#define RESTORE    (INSN_OP(2) | INSN_OP3(0x3d))
#define SETHI      (INSN_OP(0) | INSN_OP2(0x4))
#define CALL       INSN_OP(1)
#define LDUB       (INSN_OP(3) | INSN_OP3(0x01))
#define LDSB       (INSN_OP(3) | INSN_OP3(0x09))
#define LDUH       (INSN_OP(3) | INSN_OP3(0x02))
#define LDSH       (INSN_OP(3) | INSN_OP3(0x0a))
#define LDUW       (INSN_OP(3) | INSN_OP3(0x00))
#define LDSW       (INSN_OP(3) | INSN_OP3(0x08))
#define LDX        (INSN_OP(3) | INSN_OP3(0x0b))
#define STB        (INSN_OP(3) | INSN_OP3(0x05))
#define STH        (INSN_OP(3) | INSN_OP3(0x06))
#define STW        (INSN_OP(3) | INSN_OP3(0x04))
#define STX        (INSN_OP(3) | INSN_OP3(0x0e))
#define LDUBA      (INSN_OP(3) | INSN_OP3(0x11))
#define LDSBA      (INSN_OP(3) | INSN_OP3(0x19))
#define LDUHA      (INSN_OP(3) | INSN_OP3(0x12))
#define LDSHA      (INSN_OP(3) | INSN_OP3(0x1a))
#define LDUWA      (INSN_OP(3) | INSN_OP3(0x10))
#define LDSWA      (INSN_OP(3) | INSN_OP3(0x18))
#define LDXA       (INSN_OP(3) | INSN_OP3(0x1b))
#define STBA       (INSN_OP(3) | INSN_OP3(0x15))
#define STHA       (INSN_OP(3) | INSN_OP3(0x16))
#define STWA       (INSN_OP(3) | INSN_OP3(0x14))
#define STXA       (INSN_OP(3) | INSN_OP3(0x1e))

#ifndef ASI_PRIMARY_LITTLE
#define ASI_PRIMARY_LITTLE 0x88
#endif

#define LDUH_LE    (LDUHA | INSN_ASI(ASI_PRIMARY_LITTLE))
#define LDSH_LE    (LDSHA | INSN_ASI(ASI_PRIMARY_LITTLE))
#define LDUW_LE    (LDUWA | INSN_ASI(ASI_PRIMARY_LITTLE))
#define LDSW_LE    (LDSWA | INSN_ASI(ASI_PRIMARY_LITTLE))
#define LDX_LE     (LDXA  | INSN_ASI(ASI_PRIMARY_LITTLE))

#define STH_LE     (STHA  | INSN_ASI(ASI_PRIMARY_LITTLE))
#define STW_LE     (STWA  | INSN_ASI(ASI_PRIMARY_LITTLE))
#define STX_LE     (STXA  | INSN_ASI(ASI_PRIMARY_LITTLE))

static inline void tcg_out_arith(TCGContext *s, int rd, int rs1, int rs2,
                                 int op)
{
    tcg_out32(s, op | INSN_RD(rd) | INSN_RS1(rs1) |
              INSN_RS2(rs2));
}

static inline void tcg_out_arithi(TCGContext *s, int rd, int rs1,
                                  uint32_t offset, int op)
{
    tcg_out32(s, op | INSN_RD(rd) | INSN_RS1(rs1) |
              INSN_IMM13(offset));
}

static void tcg_out_arithc(TCGContext *s, int rd, int rs1,
                           int val2, int val2const, int op)
{
    tcg_out32(s, op | INSN_RD(rd) | INSN_RS1(rs1)
              | (val2const ? INSN_IMM13(val2) : INSN_RS2(val2)));
}

static inline void tcg_out_mov(TCGContext *s, TCGType type,
                               TCGReg ret, TCGReg arg)
{
    tcg_out_arith(s, ret, arg, TCG_REG_G0, ARITH_OR);
}

static inline void tcg_out_sethi(TCGContext *s, int ret, uint32_t arg)
{
    tcg_out32(s, SETHI | INSN_RD(ret) | ((arg & 0xfffffc00) >> 10));
}

static inline void tcg_out_movi_imm13(TCGContext *s, int ret, uint32_t arg)
{
    tcg_out_arithi(s, ret, TCG_REG_G0, arg, ARITH_OR);
}

static inline void tcg_out_movi_imm32(TCGContext *s, int ret, uint32_t arg)
{
    if (check_fit_tl(arg, 13))
        tcg_out_movi_imm13(s, ret, arg);
    else {
        tcg_out_sethi(s, ret, arg);
        if (arg & 0x3ff)
            tcg_out_arithi(s, ret, ret, arg & 0x3ff, ARITH_OR);
    }
}
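
/* Illustrative example (not in the original source): a 32-bit
   constant outside the simm13 range is built in two insns, sethi
   for the high 22 bits and or for the low 10.  For 0xdeadbeef:
       sethi  %hi(0xdeadbeef), %reg   ! %reg = 0xdeadbc00
       or     %reg, 0x2ef, %reg       ! 0xdeadbeef & 0x3ff == 0x2ef
   The or is skipped when the low 10 bits are zero.                  */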

static inline void tcg_out_movi(TCGContext *s, TCGType type,
                                TCGReg ret, tcg_target_long arg)
{
    /* All 32-bit constants, as well as 64-bit constants with
       no high bits set go through movi_imm32.  */
    if (TCG_TARGET_REG_BITS == 32
        || type == TCG_TYPE_I32
        || (arg & ~(tcg_target_long)0xffffffff) == 0) {
        tcg_out_movi_imm32(s, ret, arg);
    } else if (check_fit_tl(arg, 13)) {
        /* A 13-bit constant sign-extended to 64-bits.  */
        tcg_out_movi_imm13(s, ret, arg);
    } else if (check_fit_tl(arg, 32)) {
        /* A 32-bit constant sign-extended to 64-bits.  */
        tcg_out_sethi(s, ret, ~arg);
        tcg_out_arithi(s, ret, ret, (arg & 0x3ff) | -0x400, ARITH_XOR);
    } else {
        tcg_out_movi_imm32(s, TCG_REG_I4, arg >> (TCG_TARGET_REG_BITS / 2));
        tcg_out_arithi(s, TCG_REG_I4, TCG_REG_I4, 32, SHIFT_SLLX);
        tcg_out_movi_imm32(s, ret, arg);
        tcg_out_arith(s, ret, ret, TCG_REG_I4, ARITH_OR);
    }
}
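
/* Illustrative note (not in the original source): the worst-case
   64-bit constant costs up to six insns using %i4 (reserved for
   this) as scratch:
       sethi/or  high 32 bits -> %i4
       sllx      %i4, 32, %i4
       sethi/or  low 32 bits  -> ret
       or        ret, %i4, ret
   A negative 32-bit constant is cheaper: sethi of ~arg followed by
   xor with ((arg & 0x3ff) | -0x400) sign-extends in two insns.      */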

static inline void tcg_out_ldst_rr(TCGContext *s, int data, int a1,
                                   int a2, int op)
{
    tcg_out32(s, op | INSN_RD(data) | INSN_RS1(a1) | INSN_RS2(a2));
}

static inline void tcg_out_ldst(TCGContext *s, int ret, int addr,
                                int offset, int op)
{
    if (check_fit_tl(offset, 13)) {
        tcg_out32(s, op | INSN_RD(ret) | INSN_RS1(addr) |
                  INSN_IMM13(offset));
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I5, offset);
        tcg_out_ldst_rr(s, ret, addr, TCG_REG_I5, op);
    }
}

static inline void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret,
                              TCGReg arg1, tcg_target_long arg2)
{
    tcg_out_ldst(s, ret, arg1, arg2, (type == TCG_TYPE_I32 ? LDUW : LDX));
}

static inline void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg,
                              TCGReg arg1, tcg_target_long arg2)
{
    tcg_out_ldst(s, arg, arg1, arg2, (type == TCG_TYPE_I32 ? STW : STX));
}

static inline void tcg_out_ld_ptr(TCGContext *s, int ret,
                                  tcg_target_long arg)
{
    if (!check_fit_tl(arg, 10)) {
        tcg_out_movi(s, TCG_TYPE_PTR, ret, arg & ~0x3ff);
    }
    tcg_out_ld(s, TCG_TYPE_PTR, ret, ret, arg & 0x3ff);
}

static inline void tcg_out_sety(TCGContext *s, int rs)
{
    tcg_out32(s, WRY | INSN_RS1(TCG_REG_G0) | INSN_RS2(rs));
}

static inline void tcg_out_rdy(TCGContext *s, int rd)
{
    tcg_out32(s, RDY | INSN_RD(rd));
}

static inline void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
{
    if (val != 0) {
        if (check_fit_tl(val, 13))
            tcg_out_arithi(s, reg, reg, val, ARITH_ADD);
        else {
            tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I5, val);
            tcg_out_arith(s, reg, reg, TCG_REG_I5, ARITH_ADD);
        }
    }
}

static inline void tcg_out_andi(TCGContext *s, int rd, int rs,
                                tcg_target_long val)
{
    if (val != 0) {
        if (check_fit_tl(val, 13))
            tcg_out_arithi(s, rd, rs, val, ARITH_AND);
        else {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_I5, val);
            tcg_out_arith(s, rd, rs, TCG_REG_I5, ARITH_AND);
        }
    }
}

static void tcg_out_div32(TCGContext *s, int rd, int rs1,
                          int val2, int val2const, int uns)
{
    /* Load Y with the sign/zero extension of RS1 to 64-bits.  */
    if (uns) {
        tcg_out_sety(s, TCG_REG_G0);
    } else {
        tcg_out_arithi(s, TCG_REG_I5, rs1, 31, SHIFT_SRA);
        tcg_out_sety(s, TCG_REG_I5);
    }

    tcg_out_arithc(s, rd, rs1, val2, val2const,
                   uns ? ARITH_UDIV : ARITH_SDIV);
}
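
/* Illustrative note (not in the original source): the v8 SDIV/UDIV
   insns divide the 64-bit quantity (Y:rs1) by the second operand,
   which is why Y is seeded above: zero for unsigned, the sign word
   (sra rs1, 31) for signed.  E.g. -7 / 2 needs Y = 0xffffffff so
   sdiv sees the 64-bit value -7 and yields -3.                      */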

static inline void tcg_out_nop(TCGContext *s)
{
    tcg_out_sethi(s, TCG_REG_G0, 0);
}

/* OPC is a condition+annul field already built with INSN_COND.  */
static void tcg_out_branch_i32(TCGContext *s, int opc, int label_index)
{
    TCGLabel *l = &s->labels[label_index];

    if (l->has_value) {
        tcg_out32(s, (INSN_OP(0) | opc | INSN_OP2(0x2)
                      | INSN_OFF22(l->u.value - (unsigned long)s->code_ptr)));
    } else {
        tcg_out_reloc(s, s->code_ptr, R_SPARC_WDISP22, label_index, 0);
        tcg_out32(s, (INSN_OP(0) | opc | INSN_OP2(0x2) | 0));
    }
}

#if TCG_TARGET_REG_BITS == 64
/* OPC is a condition+annul field already built with INSN_COND.  */
static void tcg_out_branch_i64(TCGContext *s, int opc, int label_index)
{
    TCGLabel *l = &s->labels[label_index];

    if (l->has_value) {
        tcg_out32(s, (INSN_OP(0) | opc | INSN_OP2(0x1) |
                      (0x5 << 19) |
                      INSN_OFF19(l->u.value - (unsigned long)s->code_ptr)));
    } else {
        tcg_out_reloc(s, s->code_ptr, R_SPARC_WDISP19, label_index, 0);
        tcg_out32(s, (INSN_OP(0) | opc | INSN_OP2(0x1) |
                      (0x5 << 19) | 0));
    }
}
#endif

static const uint8_t tcg_cond_to_bcond[10] = {
    [TCG_COND_EQ] = COND_E,
    [TCG_COND_NE] = COND_NE,
    [TCG_COND_LT] = COND_L,
    [TCG_COND_GE] = COND_GE,
    [TCG_COND_LE] = COND_LE,
    [TCG_COND_GT] = COND_G,
    [TCG_COND_LTU] = COND_CS,
    [TCG_COND_GEU] = COND_CC,
    [TCG_COND_LEU] = COND_LEU,
    [TCG_COND_GTU] = COND_GU,
};
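
/* Illustrative note (not in the original source): tcg_out_branch_i32
   emits the v8 Bicc form (op2 = 2, %icc implied), while
   tcg_out_branch_i64 emits the v9 BPcc form (op2 = 1), where the
   (0x5 << 19) constant sets cc1:cc0 = 2 for %xcc and p = 1 for
   "predict taken".  tcg_cond_to_bcond maps TCG conditions onto the
   cond field; note LTU/GEU become CS/CC, i.e. carry set/clear.      */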

static void tcg_out_cmp(TCGContext *s, TCGArg c1, TCGArg c2, int c2const)
{
    tcg_out_arithc(s, TCG_REG_G0, c1, c2, c2const, ARITH_SUBCC);
}

static void tcg_out_brcond_i32(TCGContext *s, TCGCond cond,
                               TCGArg arg1, TCGArg arg2, int const_arg2,
                               int label_index)
{
    tcg_out_cmp(s, arg1, arg2, const_arg2);
    tcg_out_branch_i32(s, INSN_COND(tcg_cond_to_bcond[cond], 0), label_index);
    tcg_out_nop(s);
}

#if TCG_TARGET_REG_BITS == 64
static void tcg_out_brcond_i64(TCGContext *s, TCGCond cond,
                               TCGArg arg1, TCGArg arg2, int const_arg2,
                               int label_index)
{
    tcg_out_cmp(s, arg1, arg2, const_arg2);
    tcg_out_branch_i64(s, INSN_COND(tcg_cond_to_bcond[cond], 0), label_index);
    tcg_out_nop(s);
}
#else
static void tcg_out_brcond2_i32(TCGContext *s, TCGCond cond,
                                TCGArg al, TCGArg ah,
                                TCGArg bl, int blconst,
                                TCGArg bh, int bhconst, int label_dest)
{
    int cc, label_next = gen_new_label();

    tcg_out_cmp(s, ah, bh, bhconst);

    /* Note that we fill one of the delay slots with the second compare.  */
    switch (cond) {
    case TCG_COND_EQ:
        cc = INSN_COND(tcg_cond_to_bcond[TCG_COND_NE], 0);
        tcg_out_branch_i32(s, cc, label_next);
        tcg_out_cmp(s, al, bl, blconst);
        cc = INSN_COND(tcg_cond_to_bcond[TCG_COND_EQ], 0);
        tcg_out_branch_i32(s, cc, label_dest);
        break;

    case TCG_COND_NE:
        cc = INSN_COND(tcg_cond_to_bcond[TCG_COND_NE], 0);
        tcg_out_branch_i32(s, cc, label_dest);
        tcg_out_cmp(s, al, bl, blconst);
        tcg_out_branch_i32(s, cc, label_dest);
        break;

    default:
        /* ??? One could fairly easily special-case 64-bit unsigned
           compares against 32-bit zero-extended constants.  For instance,
           we know that (unsigned)AH < 0 is false and need not emit it.
           Similarly, (unsigned)AH > 0 being true implies AH != 0, so the
           second branch will never be taken.  */
        cc = INSN_COND(tcg_cond_to_bcond[cond], 0);
        tcg_out_branch_i32(s, cc, label_dest);
        tcg_out_nop(s);
        cc = INSN_COND(tcg_cond_to_bcond[TCG_COND_NE], 0);
        tcg_out_branch_i32(s, cc, label_next);
        tcg_out_cmp(s, al, bl, blconst);
        cc = INSN_COND(tcg_cond_to_bcond[tcg_unsigned_cond(cond)], 0);
        tcg_out_branch_i32(s, cc, label_dest);
        break;
    }
    tcg_out_nop(s);

    tcg_out_label(s, label_next, s->code_ptr);
}
#endif

static void tcg_out_setcond_i32(TCGContext *s, TCGCond cond, TCGArg ret,
                                TCGArg c1, TCGArg c2, int c2const)
{
    TCGArg t;

    /* For 32-bit comparisons, we can play games with ADDX/SUBX.  */
    switch (cond) {
    case TCG_COND_EQ:
    case TCG_COND_NE:
        if (c2 != 0) {
            tcg_out_arithc(s, ret, c1, c2, c2const, ARITH_XOR);
        }
        c1 = TCG_REG_G0, c2 = ret, c2const = 0;
        cond = (cond == TCG_COND_EQ ? TCG_COND_LEU : TCG_COND_LTU);
        break;

    case TCG_COND_GTU:
    case TCG_COND_GEU:
        if (c2const && c2 != 0) {
            tcg_out_movi_imm13(s, TCG_REG_I5, c2);
            c2 = TCG_REG_I5;
        }
        t = c1, c1 = c2, c2 = t, c2const = 0;
        cond = tcg_swap_cond(cond);
        break;

    case TCG_COND_LTU:
    case TCG_COND_LEU:
        break;

    default:
        tcg_out_cmp(s, c1, c2, c2const);
        tcg_out_movi_imm13(s, ret, 0);
        tcg_out32(s, ARITH_MOVCC | INSN_RD(ret)
                  | INSN_RS1(tcg_cond_to_bcond[cond])
                  | MOVCC_ICC | INSN_IMM11(1));
        return;
    }

    tcg_out_cmp(s, c1, c2, c2const);
    if (cond == TCG_COND_LTU) {
        tcg_out_arithi(s, ret, TCG_REG_G0, 0, ARITH_ADDX);
    } else {
        tcg_out_arithi(s, ret, TCG_REG_G0, -1, ARITH_SUBX);
    }
}
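
/* Illustrative example (not in the original source) of the ADDX/SUBX
   games: after subcc sets C = (c1 <u c2),
       ltu:  addx %g0, 0, ret          ! ret = 0 + 0 + C
       geu:  subx %g0, -1, ret         ! ret = 0 - (-1) - C = 1 - C
   EQ/NE first xor c1 with c2 (zero iff equal), then test the xor
   result x against zero: NE is the carry of (0 - x), i.e. x != 0,
   and EQ is its complement via subx.                                */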

#if TCG_TARGET_REG_BITS == 64
static void tcg_out_setcond_i64(TCGContext *s, TCGCond cond, TCGArg ret,
                                TCGArg c1, TCGArg c2, int c2const)
{
    tcg_out_cmp(s, c1, c2, c2const);
    tcg_out_movi_imm13(s, ret, 0);
    tcg_out32 (s, ARITH_MOVCC | INSN_RD(ret)
               | INSN_RS1(tcg_cond_to_bcond[cond])
               | MOVCC_XCC | INSN_IMM11(1));
}
#else
static void tcg_out_setcond2_i32(TCGContext *s, TCGCond cond, TCGArg ret,
                                 TCGArg al, TCGArg ah,
                                 TCGArg bl, int blconst,
                                 TCGArg bh, int bhconst)
{
    int lab;

    switch (cond) {
    case TCG_COND_EQ:
        tcg_out_setcond_i32(s, TCG_COND_EQ, TCG_REG_I5, al, bl, blconst);
        tcg_out_setcond_i32(s, TCG_COND_EQ, ret, ah, bh, bhconst);
        tcg_out_arith(s, ret, ret, TCG_REG_I5, ARITH_AND);
        break;

    case TCG_COND_NE:
        tcg_out_setcond_i32(s, TCG_COND_NE, TCG_REG_I5, al, bl, blconst);
        tcg_out_setcond_i32(s, TCG_COND_NE, ret, ah, bh, bhconst);
        tcg_out_arith(s, ret, ret, TCG_REG_I5, ARITH_OR);
        break;

    default:
        lab = gen_new_label();

        tcg_out_cmp(s, ah, bh, bhconst);
        tcg_out_branch_i32(s, INSN_COND(tcg_cond_to_bcond[cond], 1), lab);
        tcg_out_movi_imm13(s, ret, 1);
        tcg_out_branch_i32(s, INSN_COND(COND_NE, 1), lab);
        tcg_out_movi_imm13(s, ret, 0);

        tcg_out_setcond_i32(s, tcg_unsigned_cond(cond), ret, al, bl, blconst);

        tcg_out_label(s, lab, s->code_ptr);
        break;
    }
}
#endif

/* Generate global QEMU prologue and epilogue code */
static void tcg_target_qemu_prologue(TCGContext *s)
{
    tcg_set_frame(s, TCG_REG_I6, TCG_TARGET_CALL_STACK_OFFSET,
                  CPU_TEMP_BUF_NLONGS * (int)sizeof(long));
    tcg_out32(s, SAVE | INSN_RD(TCG_REG_O6) | INSN_RS1(TCG_REG_O6) |
              INSN_IMM13(-(TCG_TARGET_STACK_MINFRAME +
                           CPU_TEMP_BUF_NLONGS * (int)sizeof(long))));

#ifdef CONFIG_USE_GUEST_BASE
    if (GUEST_BASE != 0) {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_GUEST_BASE_REG, GUEST_BASE);
        tcg_regset_set_reg(s->reserved_regs, TCG_GUEST_BASE_REG);
    }
#endif

    tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I1) |
              INSN_RS2(TCG_REG_G0));
    tcg_out_mov(s, TCG_TYPE_PTR, TCG_AREG0, TCG_REG_I0);
}
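
/* Illustrative note (not in the original source): the prologue built
   above is
       save %sp, -(MINFRAME + temp buffer), %sp
       jmpl %i1, %g0                   ! enter the TB, address in %i1
        mov  %i0, AREG0                ! env pointer, in the delay slot
   %i0/%i1 are the caller's %o0/%o1 once the register window shifts.
   The matching epilogue is emitted inline by INDEX_op_exit_tb.      */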

#if defined(CONFIG_SOFTMMU)

#include "../../softmmu_defs.h"

/* helper signature: helper_ld_mmu(CPUState *env, target_ulong addr,
   int mmu_idx) */
static const void * const qemu_ld_helpers[4] = {
    helper_ldb_mmu,
    helper_ldw_mmu,
    helper_ldl_mmu,
    helper_ldq_mmu,
};

/* helper signature: helper_st_mmu(CPUState *env, target_ulong addr,
   uintxx_t val, int mmu_idx) */
static const void * const qemu_st_helpers[4] = {
    helper_stb_mmu,
    helper_stw_mmu,
    helper_stl_mmu,
    helper_stq_mmu,
};

/* Perform the TLB load and compare.

   Inputs:
   ADDRLO_IDX contains the index into ARGS of the low part of the
   address; the high part of the address is at ADDRLO_IDX+1.

   MEM_INDEX and S_BITS are the memory context and log2 size of the load.

   WHICH is the offset into the CPUTLBEntry structure of the slot to read.
   This should be offsetof addr_read or addr_write.

   The result of the TLB comparison is in %[ix]cc.  The sanitized address
   is in the returned register, maybe %o0.  The TLB addend is in %o1.  */

static int tcg_out_tlb_load(TCGContext *s, int addrlo_idx, int mem_index,
                            int s_bits, const TCGArg *args, int which)
{
    const int addrlo = args[addrlo_idx];
    const int r0 = TCG_REG_O0;
    const int r1 = TCG_REG_O1;
    const int r2 = TCG_REG_O2;
    int addr = addrlo;
    int tlb_ofs;

    if (TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 64) {
        /* Assemble the 64-bit address in R0.  */
        tcg_out_arithi(s, r0, addrlo, 0, SHIFT_SRL);
        tcg_out_arithi(s, r1, args[addrlo_idx + 1], 32, SHIFT_SLLX);
        tcg_out_arith(s, r0, r0, r1, ARITH_OR);
    }

    /* Shift the page number down to tlb-entry.  */
    tcg_out_arithi(s, r1, addrlo,
                   TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS, SHIFT_SRL);

    /* Mask out the page offset, except for the required alignment.  */
    tcg_out_andi(s, r0, addr, TARGET_PAGE_MASK | ((1 << s_bits) - 1));

    /* Compute tlb index, modulo tlb size.  */
    tcg_out_andi(s, r1, r1, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* Relative to the current ENV.  */
    tcg_out_arith(s, r1, TCG_AREG0, r1, ARITH_ADD);

    /* Find a base address that can load both tlb comparator and addend.  */
    tlb_ofs = offsetof(CPUArchState, tlb_table[mem_index][0]);
    if (!check_fit_tl(tlb_ofs + sizeof(CPUTLBEntry), 13)) {
        tcg_out_addi(s, r1, tlb_ofs);
        tlb_ofs = 0;
    }

    /* Load the tlb comparator and the addend.  */
    tcg_out_ld(s, TCG_TYPE_TL, r2, r1, tlb_ofs + which);
    tcg_out_ld(s, TCG_TYPE_PTR, r1, r1, tlb_ofs+offsetof(CPUTLBEntry, addend));

    /* subcc arg0, arg2, %g0 */
    tcg_out_cmp(s, r0, r2, 0);

    /* If the guest address must be zero-extended, do so now.  */
    if (TCG_TARGET_REG_BITS == 64 && TARGET_LONG_BITS == 32) {
        tcg_out_arithi(s, r0, addrlo, 0, SHIFT_SRL);
        return r0;
    }
    return addrlo;
}
#endif /* CONFIG_SOFTMMU */
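
/* Illustrative sketch (not in the original source) of the lookup the
   code above emits, written as C:
       e    = &env->tlb_table[mem_index]
                 [(addr >> TARGET_PAGE_BITS) & (CPU_TLB_SIZE - 1)];
       tag  = addr & (TARGET_PAGE_MASK | ((1 << s_bits) - 1));
       hit  = (tag == e->addr_read);        // or addr_write
       host = addr + e->addend;             // used on the hit path
   The subcc leaves "hit" in the condition codes, %o1 holds the
   addend, and the caller branches and forms the host address.       */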

static const int qemu_ld_opc[8] = {
#ifdef TARGET_WORDS_BIGENDIAN
    LDUB, LDUH, LDUW, LDX, LDSB, LDSH, LDSW, LDX
#else
    LDUB, LDUH_LE, LDUW_LE, LDX_LE, LDSB, LDSH_LE, LDSW_LE, LDX_LE
#endif
};

static const int qemu_st_opc[4] = {
#ifdef TARGET_WORDS_BIGENDIAN
    STB, STH, STW, STX
#else
    STB, STH_LE, STW_LE, STX_LE
#endif
};

static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args, int sizeop)
{
    int addrlo_idx = 1, datalo, datahi, addr_reg;
#if defined(CONFIG_SOFTMMU)
    int memi_idx, memi, s_bits, n;
    uint32_t *label_ptr[2];
#endif

    datahi = datalo = args[0];
    if (TCG_TARGET_REG_BITS == 32 && sizeop == 3) {
        datahi = args[1];
        addrlo_idx = 2;
    }

#if defined(CONFIG_SOFTMMU)
    memi_idx = addrlo_idx + 1 + (TARGET_LONG_BITS > TCG_TARGET_REG_BITS);
    memi = args[memi_idx];
    s_bits = sizeop & 3;

    addr_reg = tcg_out_tlb_load(s, addrlo_idx, memi, s_bits, args,
                                offsetof(CPUTLBEntry, addr_read));

    if (TCG_TARGET_REG_BITS == 32 && sizeop == 3) {
        int reg64;

        /* bne,pn %[xi]cc, label0 */
        label_ptr[0] = (uint32_t *)s->code_ptr;
        tcg_out32(s, (INSN_OP(0) | INSN_COND(COND_NE, 0) | INSN_OP2(0x1)
                      | ((TARGET_LONG_BITS == 64) << 21)));

        /* TLB Hit.  */
        /* Load all 64-bits into an O/G register.  */
        reg64 = (datalo < 16 ? datalo : TCG_REG_O0);
        tcg_out_ldst_rr(s, reg64, addr_reg, TCG_REG_O1, qemu_ld_opc[sizeop]);

        /* Move the two 32-bit pieces into the destination registers.  */
        tcg_out_arithi(s, datahi, reg64, 32, SHIFT_SRLX);
        if (reg64 != datalo) {
            tcg_out_mov(s, TCG_TYPE_I32, datalo, reg64);
        }

        /* b,a,pt label1 */
        label_ptr[1] = (uint32_t *)s->code_ptr;
        tcg_out32(s, (INSN_OP(0) | INSN_COND(COND_A, 0) | INSN_OP2(0x1)
                      | (1 << 29) | (1 << 19)));
    } else {
        /* The fast path is exactly one insn.  Thus we can perform the
           entire TLB Hit in the (annulled) delay slot of the branch
           over the TLB Miss case.  */

        /* beq,a,pt %[xi]cc, label0 */
        label_ptr[0] = NULL;
        label_ptr[1] = (uint32_t *)s->code_ptr;
        tcg_out32(s, (INSN_OP(0) | INSN_COND(COND_E, 0) | INSN_OP2(0x1)
                      | ((TARGET_LONG_BITS == 64) << 21)
                      | (1 << 29) | (1 << 19)));
        /* delay slot */
        tcg_out_ldst_rr(s, datalo, addr_reg, TCG_REG_O1, qemu_ld_opc[sizeop]);
    }

    /* TLB Miss.  */

    if (label_ptr[0]) {
        *label_ptr[0] |= INSN_OFF19((unsigned long)s->code_ptr -
                                    (unsigned long)label_ptr[0]);
    }
    n = 0;
    tcg_out_mov(s, TCG_TYPE_PTR, tcg_target_call_iarg_regs[n++], TCG_AREG0);
    if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
        tcg_out_mov(s, TCG_TYPE_REG, tcg_target_call_iarg_regs[n++],
                    args[addrlo_idx + 1]);
    }
    tcg_out_mov(s, TCG_TYPE_REG, tcg_target_call_iarg_regs[n++],
                args[addrlo_idx]);

    /* Store AREG0 in stack to avoid ugly glibc bugs that mangle
       global registers */
    tcg_out_st(s, TCG_TYPE_REG, TCG_AREG0, TCG_REG_CALL_STACK,
               TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
               sizeof(long));

    /* qemu_ld_helper[s_bits](arg0, arg1) */
    tcg_out32(s, CALL | ((((tcg_target_ulong)qemu_ld_helpers[s_bits]
                           - (tcg_target_ulong)s->code_ptr) >> 2)
                         & 0x3fffffff));
    /* delay slot */
    tcg_out_movi(s, TCG_TYPE_I32, tcg_target_call_iarg_regs[n], memi);

    /* Reload AREG0.  */
    tcg_out_ld(s, TCG_TYPE_REG, TCG_AREG0, TCG_REG_CALL_STACK,
               TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
               sizeof(long));

    n = tcg_target_call_oarg_regs[0];
    /* datalo = sign_extend(arg0) */
    switch (sizeop) {
    case 0 | 4:
        /* Recall that SRA sign extends from bit 31 through bit 63.  */
        tcg_out_arithi(s, datalo, n, 24, SHIFT_SLL);
        tcg_out_arithi(s, datalo, datalo, 24, SHIFT_SRA);
        break;
    case 1 | 4:
        tcg_out_arithi(s, datalo, n, 16, SHIFT_SLL);
        tcg_out_arithi(s, datalo, datalo, 16, SHIFT_SRA);
        break;
    case 2 | 4:
        tcg_out_arithi(s, datalo, n, 0, SHIFT_SRA);
        break;
    case 3:
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_mov(s, TCG_TYPE_REG, datahi, n);
            tcg_out_mov(s, TCG_TYPE_REG, datalo, n + 1);
            break;
        }
        /* FALLTHRU */
    case 0:
    case 1:
    case 2:
    default:
        /* mov */
        tcg_out_mov(s, TCG_TYPE_REG, datalo, n);
        break;
    }

    *label_ptr[1] |= INSN_OFF19((unsigned long)s->code_ptr -
                                (unsigned long)label_ptr[1]);
#else
    addr_reg = args[addrlo_idx];
    if (TCG_TARGET_REG_BITS == 64 && TARGET_LONG_BITS == 32) {
        tcg_out_arithi(s, TCG_REG_I5, addr_reg, 0, SHIFT_SRL);
        addr_reg = TCG_REG_I5;
    }
    if (TCG_TARGET_REG_BITS == 32 && sizeop == 3) {
        int reg64 = (datalo < 16 ? datalo : TCG_REG_O0);

        tcg_out_ldst_rr(s, reg64, addr_reg,
                        (GUEST_BASE ? TCG_GUEST_BASE_REG : TCG_REG_G0),
                        qemu_ld_opc[sizeop]);

        tcg_out_arithi(s, datahi, reg64, 32, SHIFT_SRLX);
        if (reg64 != datalo) {
            tcg_out_mov(s, TCG_TYPE_I32, datalo, reg64);
        }
    } else {
        tcg_out_ldst_rr(s, datalo, addr_reg,
                        (GUEST_BASE ? TCG_GUEST_BASE_REG : TCG_REG_G0),
                        qemu_ld_opc[sizeop]);
    }
#endif /* CONFIG_SOFTMMU */
}
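
/* Illustrative note (not in the original source): shape of the code
   generated above for a simple load:
           <tlb compare>               ! sets %[xi]cc
           beq,a,pt  hit               ! annulled, predicted taken
            ld   [addr + %o1], data    ! delay slot: runs only on hit
           <call helper_ld*_mmu>       ! TLB miss slow path
           <sign-extend / move result>
       hit:
   The annul bit keeps the delay-slot load out of the miss path, so a
   hit costs just the compare, the branch and the load itself.       */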

static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args, int sizeop)
{
    int addrlo_idx = 1, datalo, datahi, addr_reg;
#if defined(CONFIG_SOFTMMU)
    int memi_idx, memi, n;
    uint32_t *label_ptr;
#endif

    datahi = datalo = args[0];
    if (TCG_TARGET_REG_BITS == 32 && sizeop == 3) {
        datahi = args[1];
        addrlo_idx = 2;
    }

#if defined(CONFIG_SOFTMMU)
    memi_idx = addrlo_idx + 1 + (TARGET_LONG_BITS > TCG_TARGET_REG_BITS);
    memi = args[memi_idx];

    addr_reg = tcg_out_tlb_load(s, addrlo_idx, memi, sizeop, args,
                                offsetof(CPUTLBEntry, addr_write));

    if (TCG_TARGET_REG_BITS == 32 && sizeop == 3) {
        /* Reconstruct the full 64-bit value in %g1, using %o2 as temp.  */
        /* ??? Redefine the temps from %i4/%i5 so that we have a o/g temp. */
        tcg_out_arithi(s, TCG_REG_G1, datalo, 0, SHIFT_SRL);
        tcg_out_arithi(s, TCG_REG_O2, datahi, 32, SHIFT_SLLX);
        tcg_out_arith(s, TCG_REG_G1, TCG_REG_G1, TCG_REG_O2, ARITH_OR);
        datalo = TCG_REG_G1;
    }

    /* The fast path is exactly one insn.  Thus we can perform the entire
       TLB Hit in the (annulled) delay slot of the branch over TLB Miss.  */
    /* beq,a,pt %[xi]cc, label0 */
    label_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, (INSN_OP(0) | INSN_COND(COND_E, 0) | INSN_OP2(0x1)
                  | ((TARGET_LONG_BITS == 64) << 21)
                  | (1 << 29) | (1 << 19)));
    /* delay slot */
    tcg_out_ldst_rr(s, datalo, addr_reg, TCG_REG_O1, qemu_st_opc[sizeop]);

    /* TLB Miss.  */

    n = 0;
    tcg_out_mov(s, TCG_TYPE_PTR, tcg_target_call_iarg_regs[n++], TCG_AREG0);
    if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
        tcg_out_mov(s, TCG_TYPE_REG, tcg_target_call_iarg_regs[n++],
                    args[addrlo_idx + 1]);
    }
    tcg_out_mov(s, TCG_TYPE_REG, tcg_target_call_iarg_regs[n++],
                args[addrlo_idx]);
    if (TCG_TARGET_REG_BITS == 32 && sizeop == 3) {
        tcg_out_mov(s, TCG_TYPE_REG, tcg_target_call_iarg_regs[n++], datahi);
    }
    tcg_out_mov(s, TCG_TYPE_REG, tcg_target_call_iarg_regs[n++], datalo);

    /* Store AREG0 in stack to avoid ugly glibc bugs that mangle
       global registers */
    tcg_out_st(s, TCG_TYPE_REG, TCG_AREG0, TCG_REG_CALL_STACK,
               TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
               sizeof(long));

    /* qemu_st_helper[s_bits](arg0, arg1, arg2) */
    tcg_out32(s, CALL | ((((tcg_target_ulong)qemu_st_helpers[sizeop]
                           - (tcg_target_ulong)s->code_ptr) >> 2)
                         & 0x3fffffff));
    /* delay slot */
    tcg_out_movi(s, TCG_TYPE_REG, tcg_target_call_iarg_regs[n], memi);

    /* Reload AREG0.  */
    tcg_out_ld(s, TCG_TYPE_REG, TCG_AREG0, TCG_REG_CALL_STACK,
               TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
               sizeof(long));

    *label_ptr |= INSN_OFF19((unsigned long)s->code_ptr -
                             (unsigned long)label_ptr);
#else
    addr_reg = args[addrlo_idx];
    if (TCG_TARGET_REG_BITS == 64 && TARGET_LONG_BITS == 32) {
        tcg_out_arithi(s, TCG_REG_I5, addr_reg, 0, SHIFT_SRL);
        addr_reg = TCG_REG_I5;
    }
    if (TCG_TARGET_REG_BITS == 32 && sizeop == 3) {
        /* Reconstruct the full 64-bit value in %g1, using %o2 as temp.  */
        /* ??? Redefine the temps from %i4/%i5 so that we have a o/g temp. */
        tcg_out_arithi(s, TCG_REG_G1, datalo, 0, SHIFT_SRL);
        tcg_out_arithi(s, TCG_REG_O2, datahi, 32, SHIFT_SLLX);
        tcg_out_arith(s, TCG_REG_G1, TCG_REG_G1, TCG_REG_O2, ARITH_OR);
        datalo = TCG_REG_G1;
    }
    tcg_out_ldst_rr(s, datalo, addr_reg,
                    (GUEST_BASE ? TCG_GUEST_BASE_REG : TCG_REG_G0),
                    qemu_st_opc[sizeop]);
#endif /* CONFIG_SOFTMMU */
}

static inline void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                              const int *const_args)
{
    int c;

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I0, args[0]);
        tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I7) |
                  INSN_IMM13(8));
        tcg_out32(s, RESTORE | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_G0) |
                      INSN_RS2(TCG_REG_G0));
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* direct jump method */
            tcg_out_sethi(s, TCG_REG_I5, args[0] & 0xffffe000);
            tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I5) |
                      INSN_IMM13((args[0] & 0x1fff)));
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
        } else {
            /* indirect jump method */
            tcg_out_ld_ptr(s, TCG_REG_I5, (tcg_target_long)(s->tb_next + args[0]));
            tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I5) |
                      INSN_RS2(TCG_REG_G0));
        }
        tcg_out_nop(s);
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_call:
        if (const_args[0])
            tcg_out32(s, CALL | ((((tcg_target_ulong)args[0]
                                   - (tcg_target_ulong)s->code_ptr) >> 2)
                                 & 0x3fffffff));
        else {
            tcg_out_ld_ptr(s, TCG_REG_I5,
                           (tcg_target_long)(s->tb_next + args[0]));
            tcg_out32(s, JMPL | INSN_RD(TCG_REG_O7) | INSN_RS1(TCG_REG_I5) |
                      INSN_RS2(TCG_REG_G0));
        }
        /* Store AREG0 in stack to avoid ugly glibc bugs that mangle
           global registers */
        // delay slot
        tcg_out_st(s, TCG_TYPE_REG, TCG_AREG0, TCG_REG_CALL_STACK,
                   TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                   sizeof(long));
        tcg_out_ld(s, TCG_TYPE_REG, TCG_AREG0, TCG_REG_CALL_STACK,
                   TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                   sizeof(long));
        break;
    case INDEX_op_jmp:
    case INDEX_op_br:
        tcg_out_branch_i32(s, INSN_COND(COND_A, 0), args[0]);
        tcg_out_nop(s);
        break;
    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], (uint32_t)args[1]);
        break;

#if TCG_TARGET_REG_BITS == 64
#define OP_32_64(x)                             \
        glue(glue(case INDEX_op_, x), _i32):    \
        glue(glue(case INDEX_op_, x), _i64)
#else
#define OP_32_64(x)                             \
        glue(glue(case INDEX_op_, x), _i32)
#endif
    OP_32_64(ld8u):
        tcg_out_ldst(s, args[0], args[1], args[2], LDUB);
        break;
    OP_32_64(ld8s):
        tcg_out_ldst(s, args[0], args[1], args[2], LDSB);
        break;
    OP_32_64(ld16u):
        tcg_out_ldst(s, args[0], args[1], args[2], LDUH);
        break;
    OP_32_64(ld16s):
        tcg_out_ldst(s, args[0], args[1], args[2], LDSH);
        break;
    case INDEX_op_ld_i32:
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_ld32u_i64:
#endif
        tcg_out_ldst(s, args[0], args[1], args[2], LDUW);
        break;
    OP_32_64(st8):
        tcg_out_ldst(s, args[0], args[1], args[2], STB);
        break;
    OP_32_64(st16):
        tcg_out_ldst(s, args[0], args[1], args[2], STH);
        break;
    case INDEX_op_st_i32:
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_st32_i64:
#endif
        tcg_out_ldst(s, args[0], args[1], args[2], STW);
        break;
    OP_32_64(add):
        c = ARITH_ADD;
        goto gen_arith;
    OP_32_64(sub):
        c = ARITH_SUB;
        goto gen_arith;
    OP_32_64(and):
        c = ARITH_AND;
        goto gen_arith;
    OP_32_64(andc):
        c = ARITH_ANDN;
        goto gen_arith;
    OP_32_64(or):
        c = ARITH_OR;
        goto gen_arith;
    OP_32_64(orc):
        c = ARITH_ORN;
        goto gen_arith;
    OP_32_64(xor):
        c = ARITH_XOR;
        goto gen_arith;
    case INDEX_op_shl_i32:
        c = SHIFT_SLL;
        goto gen_arith;
    case INDEX_op_shr_i32:
        c = SHIFT_SRL;
        goto gen_arith;
    case INDEX_op_sar_i32:
        c = SHIFT_SRA;
        goto gen_arith;
    case INDEX_op_mul_i32:
        c = ARITH_UMUL;
        goto gen_arith;

    OP_32_64(neg):
        c = ARITH_SUB;
        goto gen_arith1;
    OP_32_64(not):
        c = ARITH_ORN;
        goto gen_arith1;

    case INDEX_op_div_i32:
        tcg_out_div32(s, args[0], args[1], args[2], const_args[2], 0);
        break;
    case INDEX_op_divu_i32:
        tcg_out_div32(s, args[0], args[1], args[2], const_args[2], 1);
        break;

    case INDEX_op_rem_i32:
    case INDEX_op_remu_i32:
        tcg_out_div32(s, TCG_REG_I5, args[1], args[2], const_args[2],
                      opc == INDEX_op_remu_i32);
        tcg_out_arithc(s, TCG_REG_I5, TCG_REG_I5, args[2], const_args[2],
                       ARITH_UMUL);
        tcg_out_arith(s, args[0], args[1], TCG_REG_I5, ARITH_SUB);
        break;

    case INDEX_op_brcond_i32:
        tcg_out_brcond_i32(s, args[2], args[0], args[1], const_args[1],
                           args[3]);
        break;
    case INDEX_op_setcond_i32:
        tcg_out_setcond_i32(s, args[3], args[0], args[1],
                            args[2], const_args[2]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        tcg_out_brcond2_i32(s, args[4], args[0], args[1],
                            args[2], const_args[2],
                            args[3], const_args[3], args[5]);
        break;
    case INDEX_op_setcond2_i32:
        tcg_out_setcond2_i32(s, args[5], args[0], args[1], args[2],
                             args[3], const_args[3],
                             args[4], const_args[4]);
        break;
    case INDEX_op_add2_i32:
        tcg_out_arithc(s, args[0], args[2], args[4], const_args[4],
                       ARITH_ADDCC);
        tcg_out_arithc(s, args[1], args[3], args[5], const_args[5],
                       ARITH_ADDX);
        break;
    case INDEX_op_sub2_i32:
        tcg_out_arithc(s, args[0], args[2], args[4], const_args[4],
                       ARITH_SUBCC);
        tcg_out_arithc(s, args[1], args[3], args[5], const_args[5],
                       ARITH_SUBX);
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_arithc(s, args[0], args[2], args[3], const_args[3],
                       ARITH_UMUL);
        tcg_out_rdy(s, args[1]);
        break;
#endif

    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
        break;
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
        break;
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
        break;
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
        break;
    case INDEX_op_qemu_ld32:
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_qemu_ld32u:
#endif
        tcg_out_qemu_ld(s, args, 2);
        break;
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_qemu_ld32s:
        tcg_out_qemu_ld(s, args, 2 | 4);
        break;
#endif
    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);
        break;
    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
        break;
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
        break;
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
        break;
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);
        break;

#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_movi_i64:
        tcg_out_movi(s, TCG_TYPE_I64, args[0], args[1]);
        break;
    case INDEX_op_ld32s_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], LDSW);
        break;
    case INDEX_op_ld_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], LDX);
        break;
    case INDEX_op_st_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], STX);
        break;
    case INDEX_op_shl_i64:
        c = SHIFT_SLLX;
        goto gen_arith;
    case INDEX_op_shr_i64:
        c = SHIFT_SRLX;
        goto gen_arith;
    case INDEX_op_sar_i64:
        c = SHIFT_SRAX;
        goto gen_arith;
    case INDEX_op_mul_i64:
        c = ARITH_MULX;
        goto gen_arith;
    case INDEX_op_div_i64:
        c = ARITH_SDIVX;
        goto gen_arith;
    case INDEX_op_divu_i64:
        c = ARITH_UDIVX;
        goto gen_arith;
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i64:
        tcg_out_arithc(s, TCG_REG_I5, args[1], args[2], const_args[2],
                       opc == INDEX_op_rem_i64 ? ARITH_SDIVX : ARITH_UDIVX);
        tcg_out_arithc(s, TCG_REG_I5, TCG_REG_I5, args[2], const_args[2],
                       ARITH_MULX);
        tcg_out_arith(s, args[0], args[1], TCG_REG_I5, ARITH_SUB);
        break;
    case INDEX_op_ext32s_i64:
        if (const_args[1]) {
            tcg_out_movi(s, TCG_TYPE_I64, args[0], (int32_t)args[1]);
        } else {
            tcg_out_arithi(s, args[0], args[1], 0, SHIFT_SRA);
        }
        break;
    case INDEX_op_ext32u_i64:
        if (const_args[1]) {
            tcg_out_movi_imm32(s, args[0], args[1]);
        } else {
            tcg_out_arithi(s, args[0], args[1], 0, SHIFT_SRL);
        }
        break;

    case INDEX_op_brcond_i64:
        tcg_out_brcond_i64(s, args[2], args[0], args[1], const_args[1],
                           args[3]);
        break;
    case INDEX_op_setcond_i64:
        tcg_out_setcond_i64(s, args[3], args[0], args[1],
                            args[2], const_args[2]);
        break;

#endif
    gen_arith:
        tcg_out_arithc(s, args[0], args[1], args[2], const_args[2], c);
        break;

    gen_arith1:
        tcg_out_arithc(s, args[0], TCG_REG_G0, args[1], const_args[1], c);
        break;

    default:
        fprintf(stderr, "unknown opcode 0x%x\n", opc);
        tcg_abort();
    }
}
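
/* Illustrative note (not in the original source): exit_tb above
   returns to the epilogue with "jmpl %i7+8, %g0" and a RESTORE in
   the delay slot; the return value was placed in %i0, which the
   restore turns back into the caller's %o0.  goto_tb's direct-jump
   method deliberately uses sethi+jmpl so that tb_set_jmp_target can
   later patch the destination in place.                             */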

static const TCGTargetOpDef sparc_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "ri" } },
    { INDEX_op_jmp, { "ri" } },
    { INDEX_op_br, { } },

    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "r", "r" } },
    { INDEX_op_st16_i32, { "r", "r" } },
    { INDEX_op_st_i32, { "r", "r" } },

    { INDEX_op_add_i32, { "r", "r", "rJ" } },
    { INDEX_op_mul_i32, { "r", "r", "rJ" } },
    { INDEX_op_div_i32, { "r", "r", "rJ" } },
    { INDEX_op_divu_i32, { "r", "r", "rJ" } },
    { INDEX_op_rem_i32, { "r", "r", "rJ" } },
    { INDEX_op_remu_i32, { "r", "r", "rJ" } },
    { INDEX_op_sub_i32, { "r", "r", "rJ" } },
    { INDEX_op_and_i32, { "r", "r", "rJ" } },
    { INDEX_op_andc_i32, { "r", "r", "rJ" } },
    { INDEX_op_or_i32, { "r", "r", "rJ" } },
    { INDEX_op_orc_i32, { "r", "r", "rJ" } },
    { INDEX_op_xor_i32, { "r", "r", "rJ" } },

    { INDEX_op_shl_i32, { "r", "r", "rJ" } },
    { INDEX_op_shr_i32, { "r", "r", "rJ" } },
    { INDEX_op_sar_i32, { "r", "r", "rJ" } },

    { INDEX_op_neg_i32, { "r", "rJ" } },
    { INDEX_op_not_i32, { "r", "rJ" } },

    { INDEX_op_brcond_i32, { "r", "rJ" } },
    { INDEX_op_setcond_i32, { "r", "r", "rJ" } },

#if TCG_TARGET_REG_BITS == 32
    { INDEX_op_brcond2_i32, { "r", "r", "rJ", "rJ" } },
    { INDEX_op_setcond2_i32, { "r", "r", "r", "rJ", "rJ" } },
    { INDEX_op_add2_i32, { "r", "r", "r", "r", "rJ", "rJ" } },
    { INDEX_op_sub2_i32, { "r", "r", "r", "r", "rJ", "rJ" } },
    { INDEX_op_mulu2_i32, { "r", "r", "r", "rJ" } },
#endif

#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_mov_i64, { "r", "r" } },
    { INDEX_op_movi_i64, { "r" } },
    { INDEX_op_ld8u_i64, { "r", "r" } },
    { INDEX_op_ld8s_i64, { "r", "r" } },
    { INDEX_op_ld16u_i64, { "r", "r" } },
    { INDEX_op_ld16s_i64, { "r", "r" } },
    { INDEX_op_ld32u_i64, { "r", "r" } },
    { INDEX_op_ld32s_i64, { "r", "r" } },
    { INDEX_op_ld_i64, { "r", "r" } },
    { INDEX_op_st8_i64, { "r", "r" } },
    { INDEX_op_st16_i64, { "r", "r" } },
    { INDEX_op_st32_i64, { "r", "r" } },
    { INDEX_op_st_i64, { "r", "r" } },

    { INDEX_op_add_i64, { "r", "r", "rJ" } },
    { INDEX_op_mul_i64, { "r", "r", "rJ" } },
    { INDEX_op_div_i64, { "r", "r", "rJ" } },
    { INDEX_op_divu_i64, { "r", "r", "rJ" } },
    { INDEX_op_rem_i64, { "r", "r", "rJ" } },
    { INDEX_op_remu_i64, { "r", "r", "rJ" } },
    { INDEX_op_sub_i64, { "r", "r", "rJ" } },
    { INDEX_op_and_i64, { "r", "r", "rJ" } },
    { INDEX_op_andc_i64, { "r", "r", "rJ" } },
    { INDEX_op_or_i64, { "r", "r", "rJ" } },
    { INDEX_op_orc_i64, { "r", "r", "rJ" } },
    { INDEX_op_xor_i64, { "r", "r", "rJ" } },

    { INDEX_op_shl_i64, { "r", "r", "rJ" } },
    { INDEX_op_shr_i64, { "r", "r", "rJ" } },
    { INDEX_op_sar_i64, { "r", "r", "rJ" } },

    { INDEX_op_neg_i64, { "r", "rJ" } },
    { INDEX_op_not_i64, { "r", "rJ" } },

    { INDEX_op_ext32s_i64, { "r", "ri" } },
    { INDEX_op_ext32u_i64, { "r", "ri" } },

    { INDEX_op_brcond_i64, { "r", "rJ" } },
    { INDEX_op_setcond_i64, { "r", "r", "rJ" } },
#endif

#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32, { "r", "L" } },
    { INDEX_op_qemu_ld32u, { "r", "L" } },
    { INDEX_op_qemu_ld32s, { "r", "L" } },
    { INDEX_op_qemu_ld64, { "r", "L" } },

    { INDEX_op_qemu_st8, { "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L" } },
#elif TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32, { "r", "L" } },
    { INDEX_op_qemu_ld64, { "r", "r", "L" } },

    { INDEX_op_qemu_st8, { "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L", "L" } },
#else
    { INDEX_op_qemu_ld8u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld32, { "r", "L", "L" } },
    { INDEX_op_qemu_ld64, { "L", "L", "L", "L" } },

    { INDEX_op_qemu_st8, { "L", "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L", "L", "L" } },
#endif

    { -1 },
};

static void tcg_target_init(TCGContext *s)
{
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffffffff);
#if TCG_TARGET_REG_BITS == 64
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0, 0xffffffff);
#endif
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,
                     (1 << TCG_REG_G1) |
                     (1 << TCG_REG_G2) |
                     (1 << TCG_REG_G3) |
                     (1 << TCG_REG_G4) |
                     (1 << TCG_REG_G5) |
                     (1 << TCG_REG_G6) |
                     (1 << TCG_REG_G7) |
                     (1 << TCG_REG_O0) |
                     (1 << TCG_REG_O1) |
                     (1 << TCG_REG_O2) |
                     (1 << TCG_REG_O3) |
                     (1 << TCG_REG_O4) |
                     (1 << TCG_REG_O5) |
                     (1 << TCG_REG_O7));

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_G0);
#if TCG_TARGET_REG_BITS == 64
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_I4); // for internal use
#endif
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_I5); // for internal use
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_I6);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_I7);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_O6);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_O7);
    tcg_add_target_add_op_defs(sparc_op_defs);
}

#if TCG_TARGET_REG_BITS == 64
# define ELF_HOST_MACHINE  EM_SPARCV9
#else
# define ELF_HOST_MACHINE  EM_SPARC32PLUS
# define ELF_HOST_FLAGS    EF_SPARC_32PLUS
#endif

typedef struct {
    uint32_t len __attribute__((aligned((sizeof(void *)))));
    uint32_t id;
    uint8_t version;
    char augmentation[1];
    uint8_t code_align;
    uint8_t data_align;
    uint8_t return_column;
} DebugFrameCIE;

typedef struct {
    uint32_t len __attribute__((aligned((sizeof(void *)))));
    uint32_t cie_offset;
    tcg_target_long func_start __attribute__((packed));
    tcg_target_long func_len __attribute__((packed));
    uint8_t def_cfa[TCG_TARGET_REG_BITS == 64 ? 4 : 2];
    uint8_t win_save;
    uint8_t ret_save[3];
} DebugFrameFDE;

typedef struct {
    DebugFrameCIE cie;
    DebugFrameFDE fde;
} DebugFrame;

static DebugFrame debug_frame = {
    .cie.len = sizeof(DebugFrameCIE)-4, /* length after .len member */
    .cie.id = -1,
    .cie.version = 1,
    .cie.code_align = 1,
    .cie.data_align = -sizeof(void *) & 0x7f,
    .cie.return_column = 15,            /* o7 */

    .fde.len = sizeof(DebugFrameFDE)-4, /* length after .len member */
    .fde.def_cfa = {
#if TCG_TARGET_REG_BITS == 64
        12, 30,                         /* DW_CFA_def_cfa i6, 2047 */
        (2047 & 0x7f) | 0x80, (2047 >> 7)
#else
        13, 30                          /* DW_CFA_def_cfa_register i6 */
#endif
    },
    .fde.win_save = 0x2d,               /* DW_CFA_GNU_window_save */
    .fde.ret_save = { 9, 15, 31 },      /* DW_CFA_register o7, i7 */
};

void tcg_register_jit(void *buf, size_t buf_size)
{
    debug_frame.fde.func_start = (tcg_target_long) buf;
    debug_frame.fde.func_len = buf_size;

    tcg_register_jit_int(buf, buf_size, &debug_frame, sizeof(debug_frame));
}
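
/* Illustrative note (not in the original source): debug_frame is a
   minimal in-memory .debug_frame image (one CIE plus one FDE) that
   tcg_register_jit_int hands to GDB's JIT interface so generated
   code can be unwound: the CFA is %i6 (with the 2047 stack bias on
   v9), DW_CFA_GNU_window_save marks the register window, and the
   return address is column 15 (%o7), found in %i7 after the save.   */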