/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#ifndef NDEBUG
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "%r0", "%r1", "%rp", "%r3", "%r4", "%r5", "%r6", "%r7",
    "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
    "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
    "%r24", "%r25", "%r26", "%dp", "%ret0", "%ret1", "%sp", "%r31",
};
#endif

/* This is an 8 byte temp slot in the stack frame.  */
#define STACK_TEMP_OFS -16

#ifndef GUEST_BASE
#define GUEST_BASE 0
#endif

#ifdef CONFIG_USE_GUEST_BASE
#define TCG_GUEST_BASE_REG TCG_REG_R16
#else
#define TCG_GUEST_BASE_REG TCG_REG_R0
#endif

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,

    TCG_REG_R17,
    TCG_REG_R14,
    TCG_REG_R15,
    TCG_REG_R16,

    TCG_REG_R26,
    TCG_REG_R25,
    TCG_REG_R24,
    TCG_REG_R23,

    TCG_REG_RET0,
    TCG_REG_RET1,
};

static const int tcg_target_call_iarg_regs[4] = {
    TCG_REG_R26,
    TCG_REG_R25,
    TCG_REG_R24,
    TCG_REG_R23,
};

static const int tcg_target_call_oarg_regs[2] = {
    TCG_REG_RET0,
    TCG_REG_RET1,
};

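/* Example: with BITS = 11 the accepted signed range is -1024..1023.
   The shift pair below sign-extends the low BITS bits of VAL; the
   comparison succeeds only when that round trip loses no information.  */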
/* True iff val fits a signed field of width BITS.  */
static inline int check_fit_tl(tcg_target_long val, unsigned int bits)
{
    return (val << ((sizeof(tcg_target_long) * 8 - bits))
            >> (sizeof(tcg_target_long) * 8 - bits)) == val;
}

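/* Worked example for or_mask_p: adding the lowest set bit (mask & -mask)
   collapses the lowest run of ones, so 0x00ffff00 becomes 0x01000000, a
   power of two, and is accepted; 0x00ff00ff becomes 0x00ff0100, which
   still has several bits set, and is rejected.  */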
/* True iff depi can be used to compute (reg | MASK).
   Accept a bit pattern like:
      0....01....1
      1....10....0
      0..01..10..0
   Copied from gcc sources.  */
static inline int or_mask_p(tcg_target_ulong mask)
{
    mask += mask & -mask;
    return (mask & (mask - 1)) == 0;
}

/* True iff depi or extru can be used to compute (reg & mask).
   Accept a bit pattern like these:
      0....01....1
      1....10....0
      1..10..01..1
   Copied from gcc sources.  */
static inline int and_mask_p(tcg_target_ulong mask)
{
    return or_mask_p(~mask);
}

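/* PA-RISC stores the sign bit of its short immediate fields (e.g. the
   im11 and im14 of ADDI and LDO) in the least significant bit of the
   field, with the value bits above it; low_sign_ext converts a signed
   value into that encoding.  */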
static int low_sign_ext(int val, int len)
{
    return (((val << 1) & ~(-1u << len)) | ((val >> (len - 1)) & 1));
}

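/* The 12-, 17- and 21-bit immediates of the branch and LDIL/ADDIL
   formats are scattered across the instruction word; these helpers
   permute a contiguous value into the encoded bit positions.  */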
static int reassemble_12(int as12)
{
    return (((as12 & 0x800) >> 11) |
            ((as12 & 0x400) >> 8) |
            ((as12 & 0x3ff) << 3));
}

static int reassemble_17(int as17)
{
    return (((as17 & 0x10000) >> 16) |
            ((as17 & 0x0f800) << 5) |
            ((as17 & 0x00400) >> 8) |
            ((as17 & 0x003ff) << 3));
}

static int reassemble_21(int as21)
{
    return (((as21 & 0x100000) >> 20) |
            ((as21 & 0x0ffe00) >> 8) |
            ((as21 & 0x000180) << 7) |
            ((as21 & 0x00007c) << 14) |
            ((as21 & 0x000003) << 12));
}

/* ??? Bizarrely, there is no PCREL12F relocation type.  I guess all
   such relocations are simply fully handled by the assembler.  */
#define R_PARISC_PCREL12F  R_PARISC_NONE

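/* PA-RISC branch displacements are measured from the address of the
   branch plus 8 (i.e. past the delay slot), which accounts for the +8
   bias in the pcrel computation below.  */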
static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    uint32_t *insn_ptr = (uint32_t *)code_ptr;
    uint32_t insn = *insn_ptr;
    tcg_target_long pcrel;

    value += addend;
    pcrel = (value - ((tcg_target_long)code_ptr + 8)) >> 2;

    switch (type) {
    case R_PARISC_PCREL12F:
        assert(check_fit_tl(pcrel, 12));
        /* ??? We assume all patches are forward.  See tcg_out_brcond
           re setting the NUL bit on the branch and eliding the nop.  */
        assert(pcrel >= 0);
        insn &= ~0x1ffdu;
        insn |= reassemble_12(pcrel);
        break;
    case R_PARISC_PCREL17F:
        assert(check_fit_tl(pcrel, 17));
        insn &= ~0x1f1ffdu;
        insn |= reassemble_17(pcrel);
        break;
    default:
        tcg_abort();
    }

    *insn_ptr = insn;
}

/* Maximum number of registers used for input function arguments.  */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
{
    return 4;
}

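/* Note on the 'L' constraint below: it is 'r' minus R23-R26, because
   the softmmu slow path marshals its helper-call arguments in exactly
   those registers and must not have them clobbered by the operands.  */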
/* parse target specific constraints */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch (ct_str[0]) {
    case 'r':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        break;
    case 'L': /* qemu_ld/st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R26);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R25);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R24);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R23);
        break;
    case 'Z':
        ct->ct |= TCG_CT_CONST_0;
        break;
    case 'I':
        ct->ct |= TCG_CT_CONST_S11;
        break;
    case 'J':
        ct->ct |= TCG_CT_CONST_S5;
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}

/* test if a constant matches the constraint */
static int tcg_target_const_match(tcg_target_long val,
                                  const TCGArgConstraint *arg_ct)
{
    int ct = arg_ct->ct;
    if (ct & TCG_CT_CONST) {
        return 1;
    } else if (ct & TCG_CT_CONST_0) {
        return val == 0;
    } else if (ct & TCG_CT_CONST_S5) {
        return check_fit_tl(val, 5);
    } else if (ct & TCG_CT_CONST_S11) {
        return check_fit_tl(val, 11);
    }
    return 0;
}

#define INSN_OP(x)       ((x) << 26)
#define INSN_EXT3BR(x)   ((x) << 13)
#define INSN_EXT3SH(x)   ((x) << 10)
#define INSN_EXT4(x)     ((x) << 6)
#define INSN_EXT5(x)     (x)
#define INSN_EXT6(x)     ((x) << 6)
#define INSN_EXT7(x)     ((x) << 6)
#define INSN_EXT8A(x)    ((x) << 6)
#define INSN_EXT8B(x)    ((x) << 5)
#define INSN_T(x)        (x)
#define INSN_R1(x)       ((x) << 16)
#define INSN_R2(x)       ((x) << 21)
#define INSN_DEP_LEN(x)  (32 - (x))
#define INSN_SHDEP_CP(x) ((31 - (x)) << 5)
#define INSN_SHDEP_P(x)  ((x) << 5)
#define INSN_COND(x)     ((x) << 13)
#define INSN_IM11(x)     low_sign_ext(x, 11)
#define INSN_IM14(x)     low_sign_ext(x, 14)
#define INSN_IM5(x)      (low_sign_ext(x, 5) << 16)

#define COND_NEVER   0
#define COND_EQ      1
#define COND_LT      2
#define COND_LE      3
#define COND_LTU     4
#define COND_LEU     5
#define COND_SV      6
#define COND_OD      7
#define COND_FALSE   8

#define INSN_ADD        (INSN_OP(0x02) | INSN_EXT6(0x18))
#define INSN_ADDC       (INSN_OP(0x02) | INSN_EXT6(0x1c))
#define INSN_ADDI       (INSN_OP(0x2d))
#define INSN_ADDIL      (INSN_OP(0x0a))
#define INSN_ADDL       (INSN_OP(0x02) | INSN_EXT6(0x28))
#define INSN_AND        (INSN_OP(0x02) | INSN_EXT6(0x08))
#define INSN_ANDCM      (INSN_OP(0x02) | INSN_EXT6(0x00))
#define INSN_COMCLR     (INSN_OP(0x02) | INSN_EXT6(0x22))
#define INSN_COMICLR    (INSN_OP(0x24))
#define INSN_DEP        (INSN_OP(0x35) | INSN_EXT3SH(3))
#define INSN_DEPI       (INSN_OP(0x35) | INSN_EXT3SH(7))
#define INSN_EXTRS      (INSN_OP(0x34) | INSN_EXT3SH(7))
#define INSN_EXTRU      (INSN_OP(0x34) | INSN_EXT3SH(6))
#define INSN_LDIL       (INSN_OP(0x08))
#define INSN_LDO        (INSN_OP(0x0d))
#define INSN_MTCTL      (INSN_OP(0x00) | INSN_EXT8B(0xc2))
#define INSN_OR         (INSN_OP(0x02) | INSN_EXT6(0x09))
#define INSN_SHD        (INSN_OP(0x34) | INSN_EXT3SH(2))
#define INSN_SUB        (INSN_OP(0x02) | INSN_EXT6(0x10))
#define INSN_SUBB       (INSN_OP(0x02) | INSN_EXT6(0x14))
#define INSN_SUBI       (INSN_OP(0x25))
#define INSN_VEXTRS     (INSN_OP(0x34) | INSN_EXT3SH(5))
#define INSN_VEXTRU     (INSN_OP(0x34) | INSN_EXT3SH(4))
#define INSN_VSHD       (INSN_OP(0x34) | INSN_EXT3SH(0))
#define INSN_XOR        (INSN_OP(0x02) | INSN_EXT6(0x0a))
#define INSN_ZDEP       (INSN_OP(0x35) | INSN_EXT3SH(2))
#define INSN_ZVDEP      (INSN_OP(0x35) | INSN_EXT3SH(0))

#define INSN_BL         (INSN_OP(0x3a) | INSN_EXT3BR(0))
#define INSN_BL_N       (INSN_OP(0x3a) | INSN_EXT3BR(0) | 2)
#define INSN_BLR        (INSN_OP(0x3a) | INSN_EXT3BR(2))
#define INSN_BV         (INSN_OP(0x3a) | INSN_EXT3BR(6))
#define INSN_BV_N       (INSN_OP(0x3a) | INSN_EXT3BR(6) | 2)
#define INSN_BLE_SR4    (INSN_OP(0x39) | (1 << 13))

#define INSN_LDB        (INSN_OP(0x10))
#define INSN_LDH        (INSN_OP(0x11))
#define INSN_LDW        (INSN_OP(0x12))
#define INSN_LDWM       (INSN_OP(0x13))
#define INSN_FLDDS      (INSN_OP(0x0b) | INSN_EXT4(0) | (1 << 12))

#define INSN_LDBX       (INSN_OP(0x03) | INSN_EXT4(0))
#define INSN_LDHX       (INSN_OP(0x03) | INSN_EXT4(1))
#define INSN_LDWX       (INSN_OP(0x03) | INSN_EXT4(2))

#define INSN_STB        (INSN_OP(0x18))
#define INSN_STH        (INSN_OP(0x19))
#define INSN_STW        (INSN_OP(0x1a))
#define INSN_STWM       (INSN_OP(0x1b))
#define INSN_FSTDS      (INSN_OP(0x0b) | INSN_EXT4(8) | (1 << 12))

#define INSN_COMBT      (INSN_OP(0x20))
#define INSN_COMBF      (INSN_OP(0x22))
#define INSN_COMIBT     (INSN_OP(0x21))
#define INSN_COMIBF     (INSN_OP(0x23))

/* supplied by libgcc */
extern void *__canonicalize_funcptr_for_compare(void *);

static void tcg_out_mov(TCGContext *s, int ret, int arg)
{
    /* PA1.1 defines COPY as OR r,0,t; PA2.0 defines COPY as LDO 0(r),t
       but hppa-dis.c is unaware of this definition */
    if (ret != arg) {
        tcg_out32(s, INSN_OR | INSN_T(ret) | INSN_R1(arg)
                  | INSN_R2(TCG_REG_R0));
    }
}

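/* Load a constant: values fitting the signed 14-bit LDO immediate take
   one insn; otherwise LDIL deposits the top 21 bits and an optional LDO
   adds in the low 11 bits.  */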
static void tcg_out_movi(TCGContext *s, TCGType type,
                         int ret, tcg_target_long arg)
{
    if (check_fit_tl(arg, 14)) {
        tcg_out32(s, INSN_LDO | INSN_R1(ret)
                  | INSN_R2(TCG_REG_R0) | INSN_IM14(arg));
    } else {
        uint32_t hi, lo;
        hi = arg >> 11;
        lo = arg & 0x7ff;

        tcg_out32(s, INSN_LDIL | INSN_R2(ret) | reassemble_21(hi));
        if (lo) {
            tcg_out32(s, INSN_LDO | INSN_R1(ret)
                      | INSN_R2(ret) | INSN_IM14(lo));
        }
    }
}

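/* Emit a load or store with a 14-bit displacement.  Larger offsets are
   split with ADDIL (or LDIL when the base is R0), which leaves the high
   part of the address in R1, the architectural ADDIL target.  */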
static void tcg_out_ldst(TCGContext *s, int ret, int addr,
                         tcg_target_long offset, int op)
{
    if (!check_fit_tl(offset, 14)) {
        uint32_t hi, lo, op2;   /* op2 avoids shadowing the OP parameter */

        hi = offset >> 11;
        lo = offset & 0x7ff;

        if (addr == TCG_REG_R0) {
            op2 = INSN_LDIL | INSN_R2(TCG_REG_R1);
        } else {
            op2 = INSN_ADDIL | INSN_R2(addr);
        }
        tcg_out32(s, op2 | reassemble_21(hi));

        addr = TCG_REG_R1;
        offset = lo;
    }

    if (ret != addr || offset != 0 || op != INSN_LDO) {
        tcg_out32(s, op | INSN_R1(ret) | INSN_R2(addr) | INSN_IM14(offset));
    }
}

/* This function is required by tcg.c.  */
static inline void tcg_out_ld(TCGContext *s, TCGType type, int ret,
                              int arg1, tcg_target_long arg2)
{
    tcg_out_ldst(s, ret, arg1, arg2, INSN_LDW);
}

/* This function is required by tcg.c.  */
static inline void tcg_out_st(TCGContext *s, TCGType type, int ret,
                              int arg1, tcg_target_long arg2)
{
    tcg_out_ldst(s, ret, arg1, arg2, INSN_STW);
}

static void tcg_out_ldst_index(TCGContext *s, int data,
                               int base, int index, int op)
{
    tcg_out32(s, op | INSN_T(data) | INSN_R1(index) | INSN_R2(base));
}

static inline void tcg_out_addi2(TCGContext *s, int ret, int arg1,
                                 tcg_target_long val)
{
    tcg_out_ldst(s, ret, arg1, val, INSN_LDO);
}

/* This function is required by tcg.c.  */
static inline void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
{
    tcg_out_addi2(s, reg, reg, val);
}

static inline void tcg_out_arith(TCGContext *s, int t, int r1, int r2, int op)
{
    tcg_out32(s, op | INSN_T(t) | INSN_R1(r1) | INSN_R2(r2));
}

static inline void tcg_out_arithi(TCGContext *s, int t, int r1,
                                  tcg_target_long val, int op)
{
    assert(check_fit_tl(val, 11));
    tcg_out32(s, op | INSN_R1(t) | INSN_R2(r1) | INSN_IM11(val));
}

static inline void tcg_out_nop(TCGContext *s)
{
    tcg_out_arith(s, TCG_REG_R0, TCG_REG_R0, TCG_REG_R0, INSN_OR);
}

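/* %cr11 is the Shift Amount Register (SAR); the variable shift, extract
   and deposit instructions take their bit count from it, so it must be
   loaded with MTCTL beforehand.  */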
    
430
static inline void tcg_out_mtctl_sar(TCGContext *s, int arg)
431
{
432
    tcg_out32(s, INSN_MTCTL | INSN_R2(11) | INSN_R1(arg));
433
}
434

    
/* Extract LEN bits at position OFS from ARG and place in RET.
   Note that here the bit ordering is reversed from the PA-RISC
   standard, such that the right-most bit is 0.  */
static inline void tcg_out_extr(TCGContext *s, int ret, int arg,
                                unsigned ofs, unsigned len, int sign)
{
    assert(ofs < 32 && len <= 32 - ofs);
    tcg_out32(s, (sign ? INSN_EXTRS : INSN_EXTRU)
              | INSN_R1(ret) | INSN_R2(arg)
              | INSN_SHDEP_P(31 - ofs) | INSN_DEP_LEN(len));
}

/* Likewise with OFS interpreted little-endian.  */
static inline void tcg_out_dep(TCGContext *s, int ret, int arg,
                               unsigned ofs, unsigned len)
{
    assert(ofs < 32 && len <= 32 - ofs);
    tcg_out32(s, INSN_DEP | INSN_R2(ret) | INSN_R1(arg)
              | INSN_SHDEP_CP(31 - ofs) | INSN_DEP_LEN(len));
}

static inline void tcg_out_shd(TCGContext *s, int ret, int hi, int lo,
                               unsigned count)
{
    assert(count < 32);
    tcg_out32(s, INSN_SHD | INSN_R1(hi) | INSN_R2(lo) | INSN_T(ret)
              | INSN_SHDEP_CP(count));
}

static void tcg_out_vshd(TCGContext *s, int ret, int hi, int lo, int creg)
{
    tcg_out_mtctl_sar(s, creg);
    tcg_out32(s, INSN_VSHD | INSN_T(ret) | INSN_R1(hi) | INSN_R2(lo));
}

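/* OR with a constant: when the mask is a single run of ones it is
   applied with one DEPI of -1 over that run (e.g. m = 0x00ffff00
   deposits 16 one-bits at position 8); otherwise the constant goes
   through the R1 scratch register.  */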
static void tcg_out_ori(TCGContext *s, int ret, int arg, tcg_target_ulong m)
{
    if (m == 0) {
        tcg_out_mov(s, ret, arg);
    } else if (m == -1) {
        tcg_out_movi(s, TCG_TYPE_I32, ret, -1);
    } else if (or_mask_p(m)) {
        int bs0, bs1;

        for (bs0 = 0; bs0 < 32; bs0++) {
            if ((m & (1u << bs0)) != 0) {
                break;
            }
        }
        for (bs1 = bs0; bs1 < 32; bs1++) {
            if ((m & (1u << bs1)) == 0) {
                break;
            }
        }
        assert(bs1 == 32 || (1ul << bs1) > m);

        tcg_out_mov(s, ret, arg);
        tcg_out32(s, INSN_DEPI | INSN_R2(ret) | INSN_IM5(-1)
                  | INSN_SHDEP_CP(31 - bs0) | INSN_DEP_LEN(bs1 - bs0));
    } else {
        tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_R1, m);
        tcg_out_arith(s, ret, arg, TCG_REG_R1, INSN_OR);
    }
}

static void tcg_out_andi(TCGContext *s, int ret, int arg, tcg_target_ulong m)
{
    if (m == 0) {
        tcg_out_mov(s, ret, TCG_REG_R0);
    } else if (m == -1) {
        tcg_out_mov(s, ret, arg);
    } else if (and_mask_p(m)) {
        int ls0, ls1, ms0;

        for (ls0 = 0; ls0 < 32; ls0++) {
            if ((m & (1u << ls0)) == 0) {
                break;
            }
        }
        for (ls1 = ls0; ls1 < 32; ls1++) {
            if ((m & (1u << ls1)) != 0) {
                break;
            }
        }
        for (ms0 = ls1; ms0 < 32; ms0++) {
            if ((m & (1u << ms0)) == 0) {
                break;
            }
        }
        assert(ms0 == 32);

        if (ls1 == 32) {
            tcg_out_extr(s, ret, arg, 0, ls0, 0);
        } else {
            tcg_out_mov(s, ret, arg);
            tcg_out32(s, INSN_DEPI | INSN_R2(ret) | INSN_IM5(0)
                      | INSN_SHDEP_CP(31 - ls0) | INSN_DEP_LEN(ls1 - ls0));
        }
    } else {
        tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_R1, m);
        tcg_out_arith(s, ret, arg, TCG_REG_R1, INSN_AND);
    }
}

static inline void tcg_out_ext8s(TCGContext *s, int ret, int arg)
{
    tcg_out_extr(s, ret, arg, 0, 8, 1);
}

static inline void tcg_out_ext16s(TCGContext *s, int ret, int arg)
{
    tcg_out_extr(s, ret, arg, 0, 16, 1);
}

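/* PA-RISC has no plain shift opcodes; constant shifts are expressed
   with deposit/extract (ZDEP, EXTRU, EXTRS) and variable shifts with
   the SAR-driven forms (ZVDEP, VEXTRU, VEXTRS, VSHD).  */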
static void tcg_out_shli(TCGContext *s, int ret, int arg, int count)
{
    count &= 31;
    tcg_out32(s, INSN_ZDEP | INSN_R2(ret) | INSN_R1(arg)
              | INSN_SHDEP_CP(31 - count) | INSN_DEP_LEN(32 - count));
}

static void tcg_out_shl(TCGContext *s, int ret, int arg, int creg)
{
    tcg_out_arithi(s, TCG_REG_R20, creg, 31, INSN_SUBI);
    tcg_out_mtctl_sar(s, TCG_REG_R20);
    tcg_out32(s, INSN_ZVDEP | INSN_R2(ret) | INSN_R1(arg) | INSN_DEP_LEN(32));
}

static void tcg_out_shri(TCGContext *s, int ret, int arg, int count)
{
    count &= 31;
    tcg_out_extr(s, ret, arg, count, 32 - count, 0);
}

static void tcg_out_shr(TCGContext *s, int ret, int arg, int creg)
{
    tcg_out_vshd(s, ret, TCG_REG_R0, arg, creg);
}

static void tcg_out_sari(TCGContext *s, int ret, int arg, int count)
{
    count &= 31;
    tcg_out_extr(s, ret, arg, count, 32 - count, 1);
}

static void tcg_out_sar(TCGContext *s, int ret, int arg, int creg)
{
    tcg_out_arithi(s, TCG_REG_R20, creg, 31, INSN_SUBI);
    tcg_out_mtctl_sar(s, TCG_REG_R20);
    tcg_out32(s, INSN_VEXTRS | INSN_R1(ret) | INSN_R2(arg) | INSN_DEP_LEN(32));
}

static void tcg_out_rotli(TCGContext *s, int ret, int arg, int count)
{
    count &= 31;
    tcg_out_shd(s, ret, arg, arg, 32 - count);
}

static void tcg_out_rotl(TCGContext *s, int ret, int arg, int creg)
{
    tcg_out_arithi(s, TCG_REG_R20, creg, 32, INSN_SUBI);
    tcg_out_vshd(s, ret, arg, arg, TCG_REG_R20);
}

static void tcg_out_rotri(TCGContext *s, int ret, int arg, int count)
{
    count &= 31;
    tcg_out_shd(s, ret, arg, arg, count);
}

static void tcg_out_rotr(TCGContext *s, int ret, int arg, int creg)
{
    tcg_out_vshd(s, ret, arg, arg, creg);
}

static void tcg_out_bswap16(TCGContext *s, int ret, int arg, int sign)
{
    if (ret != arg) {
        tcg_out_mov(s, ret, arg);             /* arg =  xxAB */
    }
    tcg_out_dep(s, ret, ret, 16, 8);          /* ret =  xBAB */
    tcg_out_extr(s, ret, ret, 8, 16, sign);   /* ret =  ..BA */
}

static void tcg_out_bswap32(TCGContext *s, int ret, int arg, int temp)
{
                                          /* arg =  ABCD */
    tcg_out_rotri(s, temp, arg, 16);      /* temp = CDAB */
    tcg_out_dep(s, temp, temp, 16, 8);    /* temp = CBAB */
    tcg_out_shd(s, ret, arg, temp, 8);    /* ret =  DCBA */
}

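/* Direct call: a PC-relative BL when the target is within the 17-bit
   branch range, else LDIL+BLE to the absolute address, copying the
   return address from the BLE link register R31 back into RP.  */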
static void tcg_out_call(TCGContext *s, void *func)
{
    tcg_target_long val, hi, lo, disp;

    val = (uint32_t)__canonicalize_funcptr_for_compare(func);
    disp = (val - ((tcg_target_long)s->code_ptr + 8)) >> 2;

    if (check_fit_tl(disp, 17)) {
        tcg_out32(s, INSN_BL_N | INSN_R2(TCG_REG_RP) | reassemble_17(disp));
    } else {
        hi = val >> 11;
        lo = val & 0x7ff;

        tcg_out32(s, INSN_LDIL | INSN_R2(TCG_REG_R20) | reassemble_21(hi));
        tcg_out32(s, INSN_BLE_SR4 | INSN_R2(TCG_REG_R20)
                  | reassemble_17(lo >> 2));
        tcg_out_mov(s, TCG_REG_RP, TCG_REG_R31);
    }
}

static void tcg_out_xmpyu(TCGContext *s, int retl, int reth,
                          int arg1, int arg2)
{
    /* Store both words into the stack for copy to the FPU.  */
    tcg_out_ldst(s, arg1, TCG_REG_SP, STACK_TEMP_OFS, INSN_STW);
    tcg_out_ldst(s, arg2, TCG_REG_SP, STACK_TEMP_OFS + 4, INSN_STW);

    /* Load both words into the FPU at the same time.  We get away
       with this because we can address the left and right half of the
       FPU registers individually once loaded.  */
    /* fldds stack_temp(sp),fr22 */
    tcg_out32(s, INSN_FLDDS | INSN_R2(TCG_REG_SP)
              | INSN_IM5(STACK_TEMP_OFS) | INSN_T(22));

    /* xmpyu fr22r,fr22,fr22 */
    tcg_out32(s, 0x3ad64796);

    /* Store the 64-bit result back into the stack.  */
    /* fstds fr22,stack_temp(sp) */
    tcg_out32(s, INSN_FSTDS | INSN_R2(TCG_REG_SP)
              | INSN_IM5(STACK_TEMP_OFS) | INSN_T(22));

    /* Load the pieces of the result that the caller requested.  */
    if (reth) {
        tcg_out_ldst(s, reth, TCG_REG_SP, STACK_TEMP_OFS, INSN_LDW);
    }
    if (retl) {
        tcg_out_ldst(s, retl, TCG_REG_SP, STACK_TEMP_OFS + 4, INSN_LDW);
    }
}

static void tcg_out_branch(TCGContext *s, int label_index, int nul)
{
    TCGLabel *l = &s->labels[label_index];
    uint32_t op = nul ? INSN_BL_N : INSN_BL;

    if (l->has_value) {
        tcg_target_long val = l->u.value;

        val -= (tcg_target_long)s->code_ptr + 8;
        val >>= 2;
        assert(check_fit_tl(val, 17));

        tcg_out32(s, op | reassemble_17(val));
    } else {
        tcg_out_reloc(s, s->code_ptr, R_PARISC_PCREL17F, label_index, 0);
        tcg_out32(s, op);
    }
}

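/* Map TCG conditions to PA-RISC compare conditions.  The COND_FALSE
   bit requests the negated sense of the test; the emitters below fold
   it into the "false" compare-and-branch variants or the negation bit
   of COMCLR/COMICLR.  */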
static const uint8_t tcg_cond_to_cmp_cond[10] =
{
    [TCG_COND_EQ] = COND_EQ,
    [TCG_COND_NE] = COND_EQ | COND_FALSE,
    [TCG_COND_LT] = COND_LT,
    [TCG_COND_GE] = COND_LT | COND_FALSE,
    [TCG_COND_LE] = COND_LE,
    [TCG_COND_GT] = COND_LE | COND_FALSE,
    [TCG_COND_LTU] = COND_LTU,
    [TCG_COND_GEU] = COND_LTU | COND_FALSE,
    [TCG_COND_LEU] = COND_LEU,
    [TCG_COND_GTU] = COND_LEU | COND_FALSE,
};

static void tcg_out_brcond(TCGContext *s, int cond, TCGArg c1,
                           TCGArg c2, int c2const, int label_index)
{
    TCGLabel *l = &s->labels[label_index];
    int op, pacond;

    /* Note that COMIB operates as if the immediate is the first
       operand.  We model brcond with the immediate in the second
       to better match what targets are likely to give us.  For
       consistency, model COMB with reversed operands as well.  */
    pacond = tcg_cond_to_cmp_cond[tcg_swap_cond(cond)];

    if (c2const) {
        op = (pacond & COND_FALSE ? INSN_COMIBF : INSN_COMIBT);
        op |= INSN_IM5(c2);
    } else {
        op = (pacond & COND_FALSE ? INSN_COMBF : INSN_COMBT);
        op |= INSN_R1(c2);
    }
    op |= INSN_R2(c1);
    op |= INSN_COND(pacond & 7);

    if (l->has_value) {
        tcg_target_long val = l->u.value;

        val -= (tcg_target_long)s->code_ptr + 8;
        val >>= 2;
        assert(check_fit_tl(val, 12));

        /* ??? Assume that all branches to defined labels are backward.
           Which means that if the nul bit is set, the delay slot is
           executed if the branch is taken, and not executed in fallthru.  */
        tcg_out32(s, op | reassemble_12(val));
        tcg_out_nop(s);
    } else {
        tcg_out_reloc(s, s->code_ptr, R_PARISC_PCREL12F, label_index, 0);
        /* ??? Assume that all branches to undefined labels are forward.
           Which means that if the nul bit is set, the delay slot is
           not executed if the branch is taken, which is what we want.  */
        tcg_out32(s, op | 2);
    }
}

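/* COMCLR/COMICLR always clear the target register, and nullify the
   following instruction when the condition holds.  Pairing one with an
   inverted condition against a "load 1" yields a two-insn setcond.  */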
static void tcg_out_comclr(TCGContext *s, int cond, TCGArg ret,
                           TCGArg c1, TCGArg c2, int c2const)
{
    int op, pacond;

    /* Note that COMICLR operates as if the immediate is the first
       operand.  We model setcond with the immediate in the second
       to better match what targets are likely to give us.  For
       consistency, model COMCLR with reversed operands as well.  */
    pacond = tcg_cond_to_cmp_cond[tcg_swap_cond(cond)];

    if (c2const) {
        op = INSN_COMICLR | INSN_R2(c1) | INSN_R1(ret) | INSN_IM11(c2);
    } else {
        op = INSN_COMCLR | INSN_R2(c1) | INSN_R1(c2) | INSN_T(ret);
    }
    op |= INSN_COND(pacond & 7);
    op |= pacond & COND_FALSE ? 1 << 12 : 0;

    tcg_out32(s, op);
}

static void tcg_out_brcond2(TCGContext *s, int cond, TCGArg al, TCGArg ah,
                            TCGArg bl, int blconst, TCGArg bh, int bhconst,
                            int label_index)
{
    switch (cond) {
    case TCG_COND_EQ:
    case TCG_COND_NE:
        tcg_out_comclr(s, tcg_invert_cond(cond), TCG_REG_R0, al, bl, blconst);
        tcg_out_brcond(s, cond, ah, bh, bhconst, label_index);
        break;

    default:
        tcg_out_brcond(s, cond, ah, bh, bhconst, label_index);
        tcg_out_comclr(s, TCG_COND_NE, TCG_REG_R0, ah, bh, bhconst);
        tcg_out_brcond(s, tcg_unsigned_cond(cond),
                       al, bl, blconst, label_index);
        break;
    }
}

static void tcg_out_setcond(TCGContext *s, int cond, TCGArg ret,
                            TCGArg c1, TCGArg c2, int c2const)
{
    tcg_out_comclr(s, tcg_invert_cond(cond), ret, c1, c2, c2const);
    tcg_out_movi(s, TCG_TYPE_I32, ret, 1);
}

static void tcg_out_setcond2(TCGContext *s, int cond, TCGArg ret,
                             TCGArg al, TCGArg ah, TCGArg bl, int blconst,
                             TCGArg bh, int bhconst)
{
    int scratch = TCG_REG_R20;

    if (ret != al && ret != ah
        && (blconst || ret != bl)
        && (bhconst || ret != bh)) {
        scratch = ret;
    }

    switch (cond) {
    case TCG_COND_EQ:
    case TCG_COND_NE:
        tcg_out_setcond(s, cond, scratch, al, bl, blconst);
        tcg_out_comclr(s, TCG_COND_EQ, TCG_REG_R0, ah, bh, bhconst);
        tcg_out_movi(s, TCG_TYPE_I32, scratch, cond == TCG_COND_NE);
        break;

    default:
        tcg_out_setcond(s, tcg_unsigned_cond(cond), scratch, al, bl, blconst);
        tcg_out_comclr(s, TCG_COND_EQ, TCG_REG_R0, ah, bh, bhconst);
        tcg_out_movi(s, TCG_TYPE_I32, scratch, 0);
        tcg_out_comclr(s, cond, TCG_REG_R0, ah, bh, bhconst);
        tcg_out_movi(s, TCG_TYPE_I32, scratch, 1);
        break;
    }

    tcg_out_mov(s, ret, scratch);
}

#if defined(CONFIG_SOFTMMU)
#include "../../softmmu_defs.h"

static void *qemu_ld_helpers[4] = {
    __ldb_mmu,
    __ldw_mmu,
    __ldl_mmu,
    __ldq_mmu,
};

static void *qemu_st_helpers[4] = {
    __stb_mmu,
    __stw_mmu,
    __stl_mmu,
    __stq_mmu,
};

/* Load and compare a TLB entry, and branch if TLB miss.  OFFSET is set to
   the offset of the first ADDR_READ or ADDR_WRITE member of the appropriate
   TLB for the memory index.  The return value is the offset from ENV
   contained in R1 afterward (to be used when loading ADDEND); if the
   return value is 0, R1 is not used.  */

static int tcg_out_tlb_read(TCGContext *s, int r0, int r1, int addrlo,
                            int addrhi, int s_bits, int lab_miss, int offset)
{
    int ret;

    /* Extracting the index into the TLB.  The "normal C operation" is
          r1 = addr_reg >> TARGET_PAGE_BITS;
          r1 &= CPU_TLB_SIZE - 1;
          r1 <<= CPU_TLB_ENTRY_BITS;
       What this does is extract CPU_TLB_BITS beginning at TARGET_PAGE_BITS
       and place them at CPU_TLB_ENTRY_BITS.  We can combine the first two
       operations with an EXTRU.  Unfortunately, the current value of
       CPU_TLB_ENTRY_BITS is > 3, so we can't merge that shift with the
       add that follows.  */
    tcg_out_extr(s, r1, addrlo, TARGET_PAGE_BITS, CPU_TLB_BITS, 0);
    tcg_out_andi(s, r0, addrlo, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
    tcg_out_shli(s, r1, r1, CPU_TLB_ENTRY_BITS);
    tcg_out_arith(s, r1, r1, TCG_AREG0, INSN_ADDL);

    /* Make sure that both the addr_{read,write} and addend can be
       read with a 14-bit offset from the same base register.  */
    if (check_fit_tl(offset + CPU_TLB_SIZE, 14)) {
        ret = 0;
    } else {
        ret = (offset + 0x400) & ~0x7ff;
        offset = ret - offset;
        tcg_out_addi2(s, TCG_REG_R1, r1, ret);
        r1 = TCG_REG_R1;
    }

    /* Load the entry from the computed slot.  */
    if (TARGET_LONG_BITS == 64) {
        tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_R23, r1, offset);
        tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_R20, r1, offset + 4);
    } else {
        tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_R20, r1, offset);
    }

    /* If not equal, jump to lab_miss. */
    if (TARGET_LONG_BITS == 64) {
        tcg_out_brcond2(s, TCG_COND_NE, TCG_REG_R20, TCG_REG_R23,
                        r0, 0, addrhi, 0, lab_miss);
    } else {
        tcg_out_brcond(s, TCG_COND_NE, TCG_REG_R20, r0, 0, lab_miss);
    }

    return ret;
}
#endif

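/* Emit a guest load.  Under CONFIG_SOFTMMU this is an inline TLB check
   (fast path) followed by an out-of-line call to the __ld*_mmu helper
   on miss; otherwise the GUEST_BASE offset, if any, is applied through
   the indexed load forms.  */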
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args, int opc)
{
    int addr_reg, addr_reg2;
    int data_reg, data_reg2;
    int r0, r1, mem_index, s_bits, bswap;
    tcg_target_long offset;
#if defined(CONFIG_SOFTMMU)
    int lab1, lab2, argreg;
#endif

    data_reg = *args++;
    data_reg2 = (opc == 3 ? *args++ : TCG_REG_R0);
    addr_reg = *args++;
    addr_reg2 = (TARGET_LONG_BITS == 64 ? *args++ : TCG_REG_R0);
    mem_index = *args;
    s_bits = opc & 3;

    r0 = TCG_REG_R26;
    r1 = TCG_REG_R25;

#if defined(CONFIG_SOFTMMU)
    lab1 = gen_new_label();
    lab2 = gen_new_label();

    offset = tcg_out_tlb_read(s, r0, r1, addr_reg, addr_reg2, s_bits, lab1,
                              offsetof(CPUState,
                                       tlb_table[mem_index][0].addr_read));

    /* TLB Hit.  */
    tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_R20, (offset ? TCG_REG_R1 : r1),
               offsetof(CPUState, tlb_table[mem_index][0].addend) - offset);

    tcg_out_arith(s, r0, addr_reg, TCG_REG_R20, INSN_ADDL);
    offset = TCG_REG_R0;
#else
    r0 = addr_reg;
    offset = GUEST_BASE ? TCG_GUEST_BASE_REG : TCG_REG_R0;
#endif

#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 0;
#else
    bswap = 1;
#endif
    switch (opc) {
    case 0:
        tcg_out_ldst_index(s, data_reg, r0, offset, INSN_LDBX);
        break;
    case 0 | 4:
        tcg_out_ldst_index(s, data_reg, r0, offset, INSN_LDBX);
        tcg_out_ext8s(s, data_reg, data_reg);
        break;
    case 1:
        tcg_out_ldst_index(s, data_reg, r0, offset, INSN_LDHX);
        if (bswap) {
            tcg_out_bswap16(s, data_reg, data_reg, 0);
        }
        break;
    case 1 | 4:
        tcg_out_ldst_index(s, data_reg, r0, offset, INSN_LDHX);
        if (bswap) {
            tcg_out_bswap16(s, data_reg, data_reg, 1);
        } else {
            tcg_out_ext16s(s, data_reg, data_reg);
        }
        break;
    case 2:
        tcg_out_ldst_index(s, data_reg, r0, offset, INSN_LDWX);
        if (bswap) {
            tcg_out_bswap32(s, data_reg, data_reg, TCG_REG_R20);
        }
        break;
    case 3:
        if (bswap) {
            int t = data_reg2;
            data_reg2 = data_reg;
            data_reg = t;
        }
        if (offset == TCG_REG_R0) {
            /* Make sure not to clobber the base register.  */
            if (data_reg2 == r0) {
                tcg_out_ldst(s, data_reg, r0, 4, INSN_LDW);
                tcg_out_ldst(s, data_reg2, r0, 0, INSN_LDW);
            } else {
                tcg_out_ldst(s, data_reg2, r0, 0, INSN_LDW);
                tcg_out_ldst(s, data_reg, r0, 4, INSN_LDW);
            }
        } else {
            tcg_out_addi2(s, TCG_REG_R20, r0, 4);
            tcg_out_ldst_index(s, data_reg2, r0, offset, INSN_LDWX);
            tcg_out_ldst_index(s, data_reg, TCG_REG_R20, offset, INSN_LDWX);
        }
        if (bswap) {
            tcg_out_bswap32(s, data_reg, data_reg, TCG_REG_R20);
            tcg_out_bswap32(s, data_reg2, data_reg2, TCG_REG_R20);
        }
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    tcg_out_branch(s, lab2, 1);

    /* TLB Miss.  */
    /* label1: */
    tcg_out_label(s, lab1, (tcg_target_long)s->code_ptr);

    argreg = TCG_REG_R26;
    tcg_out_mov(s, argreg--, addr_reg);
    if (TARGET_LONG_BITS == 64) {
        tcg_out_mov(s, argreg--, addr_reg2);
    }
    tcg_out_movi(s, TCG_TYPE_I32, argreg, mem_index);

    tcg_out_call(s, qemu_ld_helpers[s_bits]);

    switch (opc) {
    case 0:
        tcg_out_andi(s, data_reg, TCG_REG_RET0, 0xff);
        break;
    case 0 | 4:
        tcg_out_ext8s(s, data_reg, TCG_REG_RET0);
        break;
    case 1:
        tcg_out_andi(s, data_reg, TCG_REG_RET0, 0xffff);
        break;
    case 1 | 4:
        tcg_out_ext16s(s, data_reg, TCG_REG_RET0);
        break;
    case 2:
    case 2 | 4:
        tcg_out_mov(s, data_reg, TCG_REG_RET0);
        break;
    case 3:
        tcg_out_mov(s, data_reg, TCG_REG_RET0);
        tcg_out_mov(s, data_reg2, TCG_REG_RET1);
        break;
    default:
        tcg_abort();
    }

    /* label2: */
    tcg_out_label(s, lab2, (tcg_target_long)s->code_ptr);
#endif
}

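/* Emit a guest store.  Same fast path/slow path structure as the loads
   above, except that PA-RISC has no indexed store forms, so any TLB
   addend or GUEST_BASE must be added into the address register first.  */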
static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args, int opc)
{
    int addr_reg, addr_reg2;
    int data_reg, data_reg2;
    int r0, r1, mem_index, s_bits, bswap;
#if defined(CONFIG_SOFTMMU)
    tcg_target_long offset;
    int lab1, lab2, argreg;
#endif

    data_reg = *args++;
    data_reg2 = (opc == 3 ? *args++ : 0);
    addr_reg = *args++;
    addr_reg2 = (TARGET_LONG_BITS == 64 ? *args++ : 0);
    mem_index = *args;
    s_bits = opc;

    r0 = TCG_REG_R26;
    r1 = TCG_REG_R25;

#if defined(CONFIG_SOFTMMU)
    lab1 = gen_new_label();
    lab2 = gen_new_label();

    offset = tcg_out_tlb_read(s, r0, r1, addr_reg, addr_reg2, s_bits, lab1,
                              offsetof(CPUState,
                                       tlb_table[mem_index][0].addr_write));

    /* TLB Hit.  */
    tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_R20, (offset ? TCG_REG_R1 : r1),
               offsetof(CPUState, tlb_table[mem_index][0].addend) - offset);

    tcg_out_arith(s, r0, addr_reg, TCG_REG_R20, INSN_ADDL);
#else
    /* There are no indexed stores, so if GUEST_BASE is set
       we must do the add explicitly.  Careful to avoid R20,
       which is used for the bswaps to follow.  */
    if (GUEST_BASE == 0) {
        r0 = addr_reg;
    } else {
        tcg_out_arith(s, TCG_REG_R31, addr_reg, TCG_GUEST_BASE_REG, INSN_ADDL);
        r0 = TCG_REG_R31;
    }
#endif

#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 0;
#else
    bswap = 1;
#endif
    switch (opc) {
    case 0:
        tcg_out_ldst(s, data_reg, r0, 0, INSN_STB);
        break;
    case 1:
        if (bswap) {
            tcg_out_bswap16(s, TCG_REG_R20, data_reg, 0);
            data_reg = TCG_REG_R20;
        }
        tcg_out_ldst(s, data_reg, r0, 0, INSN_STH);
        break;
    case 2:
        if (bswap) {
            tcg_out_bswap32(s, TCG_REG_R20, data_reg, TCG_REG_R20);
            data_reg = TCG_REG_R20;
        }
        tcg_out_ldst(s, data_reg, r0, 0, INSN_STW);
        break;
    case 3:
        if (bswap) {
            tcg_out_bswap32(s, TCG_REG_R20, data_reg, TCG_REG_R20);
            tcg_out_bswap32(s, TCG_REG_R23, data_reg2, TCG_REG_R23);
            data_reg2 = TCG_REG_R20;
            data_reg = TCG_REG_R23;
        }
        tcg_out_ldst(s, data_reg2, r0, 0, INSN_STW);
        tcg_out_ldst(s, data_reg, r0, 4, INSN_STW);
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    tcg_out_branch(s, lab2, 1);

    /* TLB Miss.  */
    /* label1: */
    tcg_out_label(s, lab1, (tcg_target_long)s->code_ptr);

    argreg = TCG_REG_R26;
    tcg_out_mov(s, argreg--, addr_reg);
    if (TARGET_LONG_BITS == 64) {
        tcg_out_mov(s, argreg--, addr_reg2);
    }

    switch (opc) {
    case 0:
        tcg_out_andi(s, argreg--, data_reg, 0xff);
        tcg_out_movi(s, TCG_TYPE_I32, argreg, mem_index);
        break;
    case 1:
        tcg_out_andi(s, argreg--, data_reg, 0xffff);
        tcg_out_movi(s, TCG_TYPE_I32, argreg, mem_index);
        break;
    case 2:
        tcg_out_mov(s, argreg--, data_reg);
        tcg_out_movi(s, TCG_TYPE_I32, argreg, mem_index);
        break;
    case 3:
        /* Because of the alignment required by the 64-bit data argument,
           we will always use R23/R24.  Also, we will always run out of
           argument registers for storing mem_index, so that will have
           to go on the stack.  */
        if (mem_index == 0) {
            argreg = TCG_REG_R0;
        } else {
            argreg = TCG_REG_R20;
            tcg_out_movi(s, TCG_TYPE_I32, argreg, mem_index);
        }
        tcg_out_mov(s, TCG_REG_R23, data_reg2);
        tcg_out_mov(s, TCG_REG_R24, data_reg);
        tcg_out_st(s, TCG_TYPE_I32, argreg, TCG_REG_SP,
                   TCG_TARGET_CALL_STACK_OFFSET - 4);
        break;
    default:
        tcg_abort();
    }

    tcg_out_call(s, qemu_st_helpers[s_bits]);

    /* label2: */
    tcg_out_label(s, lab2, (tcg_target_long)s->code_ptr);
#endif
}

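/* Return to the epilogue through R18.  BV without nullification
   executes the next instruction in its delay slot, so each BV below is
   followed by the insn that completes the RET0 return value.  */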
static void tcg_out_exit_tb(TCGContext *s, TCGArg arg)
{
    if (!check_fit_tl(arg, 14)) {
        uint32_t hi, lo;
        hi = arg & ~0x7ff;
        lo = arg & 0x7ff;
        if (lo) {
            tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_RET0, hi);
            tcg_out32(s, INSN_BV | INSN_R2(TCG_REG_R18));
            tcg_out_addi(s, TCG_REG_RET0, lo);
            return;
        }
        arg = hi;
    }
    tcg_out32(s, INSN_BV | INSN_R2(TCG_REG_R18));
    tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_RET0, arg);
}

static void tcg_out_goto_tb(TCGContext *s, TCGArg arg)
{
    if (s->tb_jmp_offset) {
        /* direct jump method */
        fprintf(stderr, "goto_tb direct\n");
        tcg_abort();
    } else {
        /* indirect jump method */
        tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_R20, TCG_REG_R0,
                   (tcg_target_long)(s->tb_next + arg));
        tcg_out32(s, INSN_BV_N | INSN_R2(TCG_REG_R20));
    }
    s->tb_next_offset[arg] = s->code_ptr - s->code_buf;
}

static inline void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                              const int *const_args)
{
    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out_exit_tb(s, args[0]);
        break;
    case INDEX_op_goto_tb:
        tcg_out_goto_tb(s, args[0]);
        break;

    case INDEX_op_call:
        if (const_args[0]) {
            tcg_out_call(s, (void *)args[0]);
        } else {
            tcg_out32(s, INSN_BLE_SR4 | INSN_R2(args[0]));
            tcg_out_mov(s, TCG_REG_RP, TCG_REG_R31);
        }
        break;

    case INDEX_op_jmp:
        fprintf(stderr, "unimplemented jmp\n");
        tcg_abort();
        break;

    case INDEX_op_br:
        tcg_out_branch(s, args[0], 1);
        break;

    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], (uint32_t)args[1]);
        break;

    case INDEX_op_ld8u_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], INSN_LDB);
        break;
    case INDEX_op_ld8s_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], INSN_LDB);
        tcg_out_ext8s(s, args[0], args[0]);
        break;
    case INDEX_op_ld16u_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], INSN_LDH);
        break;
    case INDEX_op_ld16s_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], INSN_LDH);
        tcg_out_ext16s(s, args[0], args[0]);
        break;
    case INDEX_op_ld_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], INSN_LDW);
        break;

    case INDEX_op_st8_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], INSN_STB);
        break;
    case INDEX_op_st16_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], INSN_STH);
        break;
    case INDEX_op_st_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], INSN_STW);
        break;

    case INDEX_op_add_i32:
        if (const_args[2]) {
            tcg_out_addi2(s, args[0], args[1], args[2]);
        } else {
            tcg_out_arith(s, args[0], args[1], args[2], INSN_ADDL);
        }
        break;

    case INDEX_op_sub_i32:
        if (const_args[1]) {
            if (const_args[2]) {
                tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1] - args[2]);
            } else {
                /* Recall that SUBI is a reversed subtract.  */
                tcg_out_arithi(s, args[0], args[2], args[1], INSN_SUBI);
            }
        } else if (const_args[2]) {
            tcg_out_addi2(s, args[0], args[1], -args[2]);
        } else {
            tcg_out_arith(s, args[0], args[1], args[2], INSN_SUB);
        }
        break;

    case INDEX_op_and_i32:
        if (const_args[2]) {
            tcg_out_andi(s, args[0], args[1], args[2]);
        } else {
            tcg_out_arith(s, args[0], args[1], args[2], INSN_AND);
        }
        break;

    case INDEX_op_or_i32:
        if (const_args[2]) {
            tcg_out_ori(s, args[0], args[1], args[2]);
        } else {
            tcg_out_arith(s, args[0], args[1], args[2], INSN_OR);
        }
        break;

    case INDEX_op_xor_i32:
        tcg_out_arith(s, args[0], args[1], args[2], INSN_XOR);
        break;

    case INDEX_op_andc_i32:
        if (const_args[2]) {
            tcg_out_andi(s, args[0], args[1], ~args[2]);
        } else {
            tcg_out_arith(s, args[0], args[1], args[2], INSN_ANDCM);
        }
        break;

    case INDEX_op_shl_i32:
        if (const_args[2]) {
            tcg_out_shli(s, args[0], args[1], args[2]);
        } else {
            tcg_out_shl(s, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_shr_i32:
        if (const_args[2]) {
            tcg_out_shri(s, args[0], args[1], args[2]);
        } else {
            tcg_out_shr(s, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_sar_i32:
        if (const_args[2]) {
            tcg_out_sari(s, args[0], args[1], args[2]);
        } else {
            tcg_out_sar(s, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_rotl_i32:
        if (const_args[2]) {
            tcg_out_rotli(s, args[0], args[1], args[2]);
        } else {
            tcg_out_rotl(s, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_rotr_i32:
        if (const_args[2]) {
            tcg_out_rotri(s, args[0], args[1], args[2]);
        } else {
            tcg_out_rotr(s, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_mul_i32:
        tcg_out_xmpyu(s, args[0], TCG_REG_R0, args[1], args[2]);
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_xmpyu(s, args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_bswap16_i32:
        tcg_out_bswap16(s, args[0], args[1], 0);
        break;
    case INDEX_op_bswap32_i32:
        tcg_out_bswap32(s, args[0], args[1], TCG_REG_R20);
        break;

    case INDEX_op_not_i32:
        tcg_out_arithi(s, args[0], args[1], -1, INSN_SUBI);
        break;
    case INDEX_op_ext8s_i32:
        tcg_out_ext8s(s, args[0], args[1]);
        break;
    case INDEX_op_ext16s_i32:
        tcg_out_ext16s(s, args[0], args[1]);
        break;

    /* These three correspond exactly to the fallback implementation.
       But by including them we reduce the number of TCG ops that
       need to be generated, and these opcodes are fairly common.  */
    case INDEX_op_neg_i32:
        tcg_out_arith(s, args[0], TCG_REG_R0, args[1], INSN_SUB);
        break;
    case INDEX_op_ext8u_i32:
        tcg_out_andi(s, args[0], args[1], 0xff);
        break;
    case INDEX_op_ext16u_i32:
        tcg_out_andi(s, args[0], args[1], 0xffff);
        break;

    case INDEX_op_brcond_i32:
        tcg_out_brcond(s, args[2], args[0], args[1], const_args[1], args[3]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_brcond2(s, args[4], args[0], args[1],
                        args[2], const_args[2],
                        args[3], const_args[3], args[5]);
        break;

    case INDEX_op_setcond_i32:
        tcg_out_setcond(s, args[3], args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_setcond2_i32:
        tcg_out_setcond2(s, args[5], args[0], args[1], args[2],
                         args[3], const_args[3], args[4], const_args[4]);
        break;

    case INDEX_op_add2_i32:
        if (const_args[4]) {
            tcg_out_arithi(s, args[0], args[2], args[4], INSN_ADDI);
        } else {
            tcg_out_arith(s, args[0], args[2], args[4], INSN_ADD);
        }
        tcg_out_arith(s, args[1], args[3], args[5], INSN_ADDC);
        break;

    case INDEX_op_sub2_i32:
        if (const_args[2]) {
            /* Recall that SUBI is a reversed subtract.  */
            tcg_out_arithi(s, args[0], args[4], args[2], INSN_SUBI);
        } else {
            tcg_out_arith(s, args[0], args[2], args[4], INSN_SUB);
        }
        tcg_out_arith(s, args[1], args[3], args[5], INSN_SUBB);
        break;

    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
        break;
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
        break;
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
        break;
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
        break;
    case INDEX_op_qemu_ld32:
        tcg_out_qemu_ld(s, args, 2);
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);
        break;

    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
        break;
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
        break;
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
        break;
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);
        break;

    default:
        fprintf(stderr, "unknown opcode 0x%x\n", opc);
        tcg_abort();
    }
}

static const TCGTargetOpDef hppa_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },

    { INDEX_op_call, { "ri" } },
    { INDEX_op_jmp, { "r" } },
    { INDEX_op_br, { } },

    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },

    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "rZ", "r" } },
    { INDEX_op_st16_i32, { "rZ", "r" } },
    { INDEX_op_st_i32, { "rZ", "r" } },

    { INDEX_op_add_i32, { "r", "rZ", "ri" } },
    { INDEX_op_sub_i32, { "r", "rI", "ri" } },
    { INDEX_op_and_i32, { "r", "rZ", "ri" } },
    { INDEX_op_or_i32, { "r", "rZ", "ri" } },
    { INDEX_op_xor_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_andc_i32, { "r", "rZ", "ri" } },

    { INDEX_op_mul_i32, { "r", "r", "r" } },
    { INDEX_op_mulu2_i32, { "r", "r", "r", "r" } },

    { INDEX_op_shl_i32, { "r", "r", "ri" } },
    { INDEX_op_shr_i32, { "r", "r", "ri" } },
    { INDEX_op_sar_i32, { "r", "r", "ri" } },
    { INDEX_op_rotl_i32, { "r", "r", "ri" } },
    { INDEX_op_rotr_i32, { "r", "r", "ri" } },

    { INDEX_op_bswap16_i32, { "r", "r" } },
    { INDEX_op_bswap32_i32, { "r", "r" } },
    { INDEX_op_neg_i32, { "r", "r" } },
    { INDEX_op_not_i32, { "r", "r" } },

    { INDEX_op_ext8s_i32, { "r", "r" } },
    { INDEX_op_ext8u_i32, { "r", "r" } },
    { INDEX_op_ext16s_i32, { "r", "r" } },
    { INDEX_op_ext16u_i32, { "r", "r" } },

    { INDEX_op_brcond_i32, { "rZ", "rJ" } },
    { INDEX_op_brcond2_i32, { "rZ", "rZ", "rJ", "rJ" } },

    { INDEX_op_setcond_i32, { "r", "rZ", "rI" } },
    { INDEX_op_setcond2_i32, { "r", "rZ", "rZ", "rI", "rI" } },

    { INDEX_op_add2_i32, { "r", "r", "rZ", "rZ", "rI", "rZ" } },
    { INDEX_op_sub2_i32, { "r", "r", "rI", "rZ", "rZ", "rZ" } },

#if TARGET_LONG_BITS == 32
    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32, { "r", "L" } },
    { INDEX_op_qemu_ld64, { "r", "r", "L" } },

    { INDEX_op_qemu_st8, { "LZ", "L" } },
    { INDEX_op_qemu_st16, { "LZ", "L" } },
    { INDEX_op_qemu_st32, { "LZ", "L" } },
    { INDEX_op_qemu_st64, { "LZ", "LZ", "L" } },
#else
    { INDEX_op_qemu_ld8u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld32, { "r", "L", "L" } },
    { INDEX_op_qemu_ld64, { "r", "r", "L", "L" } },

    { INDEX_op_qemu_st8, { "LZ", "L", "L" } },
    { INDEX_op_qemu_st16, { "LZ", "L", "L" } },
    { INDEX_op_qemu_st32, { "LZ", "L", "L" } },
    { INDEX_op_qemu_st64, { "LZ", "LZ", "L", "L" } },
#endif
    { -1 },
};

static int tcg_target_callee_save_regs[] = {
    /* R2, the return address register, is saved specially
       in the caller's frame.  */
    /* R3, the frame pointer, is not currently modified.  */
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    TCG_REG_R16,
    /* R17 is the global env, so no need to save.  */
    TCG_REG_R18
};

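/* Prologue/epilogue.  STWM saves the first callee-saved register and
   allocates the whole frame in one instruction; the matching LDWM that
   pops the frame sits in the delay slot of the final BV.  */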
void tcg_target_qemu_prologue(TCGContext *s)
{
    int frame_size, i;

    /* Allocate space for the fixed frame marker.  */
    frame_size = -TCG_TARGET_CALL_STACK_OFFSET;
    frame_size += TCG_TARGET_STATIC_CALL_ARGS_SIZE;

    /* Allocate space for the saved registers.  */
    frame_size += ARRAY_SIZE(tcg_target_callee_save_regs) * 4;

    /* Align the allocated space.  */
    frame_size = ((frame_size + TCG_TARGET_STACK_ALIGN - 1)
                  & -TCG_TARGET_STACK_ALIGN);

    /* The return address is stored in the caller's frame.  */
    tcg_out_st(s, TCG_TYPE_PTR, TCG_REG_RP, TCG_REG_SP, -20);

    /* Allocate stack frame, saving the first register at the same time.  */
    tcg_out_ldst(s, tcg_target_callee_save_regs[0],
                 TCG_REG_SP, frame_size, INSN_STWM);

    /* Save all callee saved registers.  */
    for (i = 1; i < ARRAY_SIZE(tcg_target_callee_save_regs); i++) {
        tcg_out_st(s, TCG_TYPE_PTR, tcg_target_callee_save_regs[i],
                   TCG_REG_SP, -frame_size + i * 4);
    }

    if (GUEST_BASE != 0) {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_GUEST_BASE_REG, GUEST_BASE);
    }

    /* Jump to TB, and adjust R18 to be the return address.  */
    tcg_out32(s, INSN_BLE_SR4 | INSN_R2(TCG_REG_R26));
    tcg_out_mov(s, TCG_REG_R18, TCG_REG_R31);

    /* Restore callee saved registers.  */
    tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_RP, TCG_REG_SP, -frame_size - 20);
    for (i = 1; i < ARRAY_SIZE(tcg_target_callee_save_regs); i++) {
        tcg_out_ld(s, TCG_TYPE_PTR, tcg_target_callee_save_regs[i],
                   TCG_REG_SP, -frame_size + i * 4);
    }

    /* Deallocate stack frame and return.  */
    tcg_out32(s, INSN_BV | INSN_R2(TCG_REG_RP));
    tcg_out_ldst(s, tcg_target_callee_save_regs[0],
                 TCG_REG_SP, -frame_size, INSN_LDWM);
}

void tcg_target_init(TCGContext *s)
{
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffffffff);

    tcg_regset_clear(tcg_target_call_clobber_regs);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R20);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R21);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R22);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R23);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R24);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R25);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R26);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_RET0);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_RET1);

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R0);  /* hardwired to zero */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R1);  /* addil target */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_RP);  /* link register */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R3);  /* frame pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R18); /* return pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R19); /* clobbered w/o pic */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R20); /* reserved */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_DP);  /* data pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_SP);  /* stack pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R31); /* ble link reg */
    if (GUEST_BASE != 0) {
        tcg_regset_set_reg(s->reserved_regs, TCG_GUEST_BASE_REG);
    }

    tcg_add_target_add_op_defs(hppa_op_defs);
}