/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#ifndef NDEBUG
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "%r0", "%r1", "%rp", "%r3", "%r4", "%r5", "%r6", "%r7",
    "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
    "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
    "%r24", "%r25", "%r26", "%dp", "%ret0", "%ret1", "%sp", "%r31",
};
#endif

/* This is an 8 byte temp slot in the stack frame.  */
#define STACK_TEMP_OFS -16

#ifdef CONFIG_USE_GUEST_BASE
#define TCG_GUEST_BASE_REG TCG_REG_R16
#else
#define TCG_GUEST_BASE_REG TCG_REG_R0
#endif

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,

    TCG_REG_R17,
    TCG_REG_R14,
    TCG_REG_R15,
    TCG_REG_R16,

    TCG_REG_R26,
    TCG_REG_R25,
    TCG_REG_R24,
    TCG_REG_R23,

    TCG_REG_RET0,
    TCG_REG_RET1,
};

static const int tcg_target_call_iarg_regs[4] = {
    TCG_REG_R26,
    TCG_REG_R25,
    TCG_REG_R24,
    TCG_REG_R23,
};

static const int tcg_target_call_oarg_regs[2] = {
    TCG_REG_RET0,
    TCG_REG_RET1,
};

/* True iff val fits a signed field of width BITS.  */
static inline int check_fit_tl(tcg_target_long val, unsigned int bits)
{
    return (val << ((sizeof(tcg_target_long) * 8 - bits))
            >> (sizeof(tcg_target_long) * 8 - bits)) == val;
}
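/* For example, with BITS = 11 the representable range is -1024..1023:
   check_fit_tl(1023, 11) and check_fit_tl(-1024, 11) hold, while
   check_fit_tl(1024, 11) does not.  The shift pair sign-extends the low
   BITS bits over the full word and compares against the original.  */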

/* True iff depi can be used to compute (reg | MASK).
   Accept a bit pattern like:
      0....01....1
      1....10....0
      0..01..10..0
   Copied from gcc sources.  */
static inline int or_mask_p(tcg_target_ulong mask)
{
    if (mask == 0 || mask == -1) {
        return 0;
    }
    mask += mask & -mask;
    return (mask & (mask - 1)) == 0;
}
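/* For illustration: or_mask_p(0x00ff0000) holds, since adding the lowest
   set bit (0x00010000) yields 0x01000000, a power of two.  By contrast
   or_mask_p(0x00ff00ff) fails: the set bits are not one contiguous run,
   so the sum 0x00ff0100 is not a power of two.  */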

/* True iff depi or extru can be used to compute (reg & mask).
   Accept a bit pattern like these:
      0....01....1
      1....10....0
      1..10..01..1
   Copied from gcc sources.  */
static inline int and_mask_p(tcg_target_ulong mask)
{
    return or_mask_p(~mask);
}

static int low_sign_ext(int val, int len)
{
    return (((val << 1) & ~(-1u << len)) | ((val >> (len - 1)) & 1));
}
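/* PA-RISC "low sign" immediates store the sign bit in the least
   significant bit of the field.  E.g. low_sign_ext(5, 5) yields 0b01010
   (the magnitude shifted left one) and low_sign_ext(-1, 5) yields
   0b11111.  */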

static int reassemble_12(int as12)
{
    return (((as12 & 0x800) >> 11) |
            ((as12 & 0x400) >> 8) |
            ((as12 & 0x3ff) << 3));
}

static int reassemble_17(int as17)
{
    return (((as17 & 0x10000) >> 16) |
            ((as17 & 0x0f800) << 5) |
            ((as17 & 0x00400) >> 8) |
            ((as17 & 0x003ff) << 3));
}

static int reassemble_21(int as21)
{
    return (((as21 & 0x100000) >> 20) |
            ((as21 & 0x0ffe00) >> 8) |
            ((as21 & 0x000180) << 7) |
            ((as21 & 0x00007c) << 14) |
            ((as21 & 0x000003) << 12));
}
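/* These helpers scatter a contiguous immediate into the permuted bit
   positions that PA-RISC instruction words actually use: reassemble_12
   and reassemble_17 build the branch displacement fields patched and
   emitted below (COMB- and BL-format instructions), while reassemble_21
   builds the 21-bit immediate field of LDIL/ADDIL.  */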

/* ??? Bizarrely, there is no PCREL12F relocation type.  I guess all
   such relocations are simply fully handled by the assembler.  */
#define R_PARISC_PCREL12F  R_PARISC_NONE
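/* PA-RISC branch displacements are word-scaled and measured from the
   address of the branch instruction plus 8, hence the "code_ptr + 8"
   and ">> 2" in the pc-relative computation below.  */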

static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    uint32_t *insn_ptr = (uint32_t *)code_ptr;
    uint32_t insn = *insn_ptr;
    tcg_target_long pcrel;

    value += addend;
    pcrel = (value - ((tcg_target_long)code_ptr + 8)) >> 2;

    switch (type) {
    case R_PARISC_PCREL12F:
        assert(check_fit_tl(pcrel, 12));
        /* ??? We assume all patches are forward.  See tcg_out_brcond
           re setting the NUL bit on the branch and eliding the nop.  */
        assert(pcrel >= 0);
        insn &= ~0x1ffdu;
        insn |= reassemble_12(pcrel);
        break;
    case R_PARISC_PCREL17F:
        assert(check_fit_tl(pcrel, 17));
        insn &= ~0x1f1ffdu;
        insn |= reassemble_17(pcrel);
        break;
    default:
        tcg_abort();
    }

    *insn_ptr = insn;
}

/* Maximum number of registers used for input function arguments.  */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
{
    return 4;
}

/* Parse target-specific constraints.  */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch (ct_str[0]) {
    case 'r':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        break;
    case 'L': /* qemu_ld/st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R26);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R25);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R24);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R23);
        break;
    case 'Z':
        ct->ct |= TCG_CT_CONST_0;
        break;
    case 'I':
        ct->ct |= TCG_CT_CONST_S11;
        break;
    case 'J':
        ct->ct |= TCG_CT_CONST_S5;
        break;
    case 'K':
        ct->ct |= TCG_CT_CONST_MS11;
        break;
    case 'M':
        ct->ct |= TCG_CT_CONST_AND;
        break;
    case 'O':
        ct->ct |= TCG_CT_CONST_OR;
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}
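/* In summary: 'r' is any register; 'L' is any register except the call
   argument registers R23-R26, which are kept free for the softmmu
   helper calls; 'Z' is the constant zero; 'I' a signed 11-bit constant;
   'J' a signed 5-bit constant; 'K' a constant whose negation fits in
   11 bits; 'M' and 'O' are constants usable as AND and OR masks, as
   tested by and_mask_p and or_mask_p above.  */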

/* Test if a constant matches the constraint.  */
static int tcg_target_const_match(tcg_target_long val,
                                  const TCGArgConstraint *arg_ct)
{
    int ct = arg_ct->ct;
    if (ct & TCG_CT_CONST) {
        return 1;
    } else if (ct & TCG_CT_CONST_0) {
        return val == 0;
    } else if (ct & TCG_CT_CONST_S5) {
        return check_fit_tl(val, 5);
    } else if (ct & TCG_CT_CONST_S11) {
        return check_fit_tl(val, 11);
    } else if (ct & TCG_CT_CONST_MS11) {
        return check_fit_tl(-val, 11);
    } else if (ct & TCG_CT_CONST_AND) {
        return and_mask_p(val);
    } else if (ct & TCG_CT_CONST_OR) {
        return or_mask_p(val);
    }
    return 0;
}

#define INSN_OP(x)       ((x) << 26)
#define INSN_EXT3BR(x)   ((x) << 13)
#define INSN_EXT3SH(x)   ((x) << 10)
#define INSN_EXT4(x)     ((x) << 6)
#define INSN_EXT5(x)     (x)
#define INSN_EXT6(x)     ((x) << 6)
#define INSN_EXT7(x)     ((x) << 6)
#define INSN_EXT8A(x)    ((x) << 6)
#define INSN_EXT8B(x)    ((x) << 5)
#define INSN_T(x)        (x)
#define INSN_R1(x)       ((x) << 16)
#define INSN_R2(x)       ((x) << 21)
#define INSN_DEP_LEN(x)  (32 - (x))
#define INSN_SHDEP_CP(x) ((31 - (x)) << 5)
#define INSN_SHDEP_P(x)  ((x) << 5)
#define INSN_COND(x)     ((x) << 13)
#define INSN_IM11(x)     low_sign_ext(x, 11)
#define INSN_IM14(x)     low_sign_ext(x, 14)
#define INSN_IM5(x)      (low_sign_ext(x, 5) << 16)
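/* An instruction word is assembled by OR-ing these fields together:
   the major opcode occupies bits 31:26, R2 bits 25:21, R1 bits 20:16,
   and T bits 4:0.  For instance, "add r1,r2,t" is emitted below as
   INSN_ADD | INSN_T(t) | INSN_R1(r1) | INSN_R2(r2).  */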

#define COND_NEVER   0
#define COND_EQ      1
#define COND_LT      2
#define COND_LE      3
#define COND_LTU     4
#define COND_LEU     5
#define COND_SV      6
#define COND_OD      7
#define COND_FALSE   8

#define INSN_ADD        (INSN_OP(0x02) | INSN_EXT6(0x18))
#define INSN_ADDC       (INSN_OP(0x02) | INSN_EXT6(0x1c))
#define INSN_ADDI       (INSN_OP(0x2d))
#define INSN_ADDIL      (INSN_OP(0x0a))
#define INSN_ADDL       (INSN_OP(0x02) | INSN_EXT6(0x28))
#define INSN_AND        (INSN_OP(0x02) | INSN_EXT6(0x08))
#define INSN_ANDCM      (INSN_OP(0x02) | INSN_EXT6(0x00))
#define INSN_COMCLR     (INSN_OP(0x02) | INSN_EXT6(0x22))
#define INSN_COMICLR    (INSN_OP(0x24))
#define INSN_DEP        (INSN_OP(0x35) | INSN_EXT3SH(3))
#define INSN_DEPI       (INSN_OP(0x35) | INSN_EXT3SH(7))
#define INSN_EXTRS      (INSN_OP(0x34) | INSN_EXT3SH(7))
#define INSN_EXTRU      (INSN_OP(0x34) | INSN_EXT3SH(6))
#define INSN_LDIL       (INSN_OP(0x08))
#define INSN_LDO        (INSN_OP(0x0d))
#define INSN_MTCTL      (INSN_OP(0x00) | INSN_EXT8B(0xc2))
#define INSN_OR         (INSN_OP(0x02) | INSN_EXT6(0x09))
#define INSN_SHD        (INSN_OP(0x34) | INSN_EXT3SH(2))
#define INSN_SUB        (INSN_OP(0x02) | INSN_EXT6(0x10))
#define INSN_SUBB       (INSN_OP(0x02) | INSN_EXT6(0x14))
#define INSN_SUBI       (INSN_OP(0x25))
#define INSN_VEXTRS     (INSN_OP(0x34) | INSN_EXT3SH(5))
#define INSN_VEXTRU     (INSN_OP(0x34) | INSN_EXT3SH(4))
#define INSN_VSHD       (INSN_OP(0x34) | INSN_EXT3SH(0))
#define INSN_XOR        (INSN_OP(0x02) | INSN_EXT6(0x0a))
#define INSN_ZDEP       (INSN_OP(0x35) | INSN_EXT3SH(2))
#define INSN_ZVDEP      (INSN_OP(0x35) | INSN_EXT3SH(0))

#define INSN_BL         (INSN_OP(0x3a) | INSN_EXT3BR(0))
#define INSN_BL_N       (INSN_OP(0x3a) | INSN_EXT3BR(0) | 2)
#define INSN_BLR        (INSN_OP(0x3a) | INSN_EXT3BR(2))
#define INSN_BV         (INSN_OP(0x3a) | INSN_EXT3BR(6))
#define INSN_BV_N       (INSN_OP(0x3a) | INSN_EXT3BR(6) | 2)
#define INSN_BLE_SR4    (INSN_OP(0x39) | (1 << 13))

#define INSN_LDB        (INSN_OP(0x10))
#define INSN_LDH        (INSN_OP(0x11))
#define INSN_LDW        (INSN_OP(0x12))
#define INSN_LDWM       (INSN_OP(0x13))
#define INSN_FLDDS      (INSN_OP(0x0b) | INSN_EXT4(0) | (1 << 12))

#define INSN_LDBX       (INSN_OP(0x03) | INSN_EXT4(0))
#define INSN_LDHX       (INSN_OP(0x03) | INSN_EXT4(1))
#define INSN_LDWX       (INSN_OP(0x03) | INSN_EXT4(2))

#define INSN_STB        (INSN_OP(0x18))
#define INSN_STH        (INSN_OP(0x19))
#define INSN_STW        (INSN_OP(0x1a))
#define INSN_STWM       (INSN_OP(0x1b))
#define INSN_FSTDS      (INSN_OP(0x0b) | INSN_EXT4(8) | (1 << 12))

#define INSN_COMBT      (INSN_OP(0x20))
#define INSN_COMBF      (INSN_OP(0x22))
#define INSN_COMIBT     (INSN_OP(0x21))
#define INSN_COMIBF     (INSN_OP(0x23))

/* Supplied by libgcc.  */
extern void *__canonicalize_funcptr_for_compare(void *);

static void tcg_out_mov(TCGContext *s, TCGType type, int ret, int arg)
{
    /* PA1.1 defines COPY as OR r,0,t; PA2.0 defines COPY as LDO 0(r),t,
       but hppa-dis.c is unaware of this definition.  */
    if (ret != arg) {
        tcg_out32(s, INSN_OR | INSN_T(ret) | INSN_R1(arg)
                  | INSN_R2(TCG_REG_R0));
    }
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         int ret, tcg_target_long arg)
{
    if (check_fit_tl(arg, 14)) {
        tcg_out32(s, INSN_LDO | INSN_R1(ret)
                  | INSN_R2(TCG_REG_R0) | INSN_IM14(arg));
    } else {
        uint32_t hi, lo;
        hi = arg >> 11;
        lo = arg & 0x7ff;

        tcg_out32(s, INSN_LDIL | INSN_R2(ret) | reassemble_21(hi));
        if (lo) {
            tcg_out32(s, INSN_LDO | INSN_R1(ret)
                      | INSN_R2(ret) | INSN_IM14(lo));
        }
    }
}
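/* E.g. loading 0x12345678 splits into hi = 0x2468a and lo = 0x678:
   LDIL deposits hi into the upper 21 bits of RET (i.e. hi << 11), and
   LDO then adds the remaining low eleven bits.  */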

static void tcg_out_ldst(TCGContext *s, int ret, int addr,
                         tcg_target_long offset, int op)
{
    if (!check_fit_tl(offset, 14)) {
        uint32_t hi, lo, op;

        hi = offset >> 11;
        lo = offset & 0x7ff;

        if (addr == TCG_REG_R0) {
            op = INSN_LDIL | INSN_R2(TCG_REG_R1);
        } else {
            op = INSN_ADDIL | INSN_R2(addr);
        }
        tcg_out32(s, op | reassemble_21(hi));

        addr = TCG_REG_R1;
        offset = lo;
    }

    if (ret != addr || offset != 0 || op != INSN_LDO) {
        tcg_out32(s, op | INSN_R1(ret) | INSN_R2(addr) | INSN_IM14(offset));
    }
}

/* This function is required by tcg.c.  */
static inline void tcg_out_ld(TCGContext *s, TCGType type, int ret,
                              int arg1, tcg_target_long arg2)
{
    tcg_out_ldst(s, ret, arg1, arg2, INSN_LDW);
}

/* This function is required by tcg.c.  */
static inline void tcg_out_st(TCGContext *s, TCGType type, int ret,
                              int arg1, tcg_target_long arg2)
{
    tcg_out_ldst(s, ret, arg1, arg2, INSN_STW);
}

static void tcg_out_ldst_index(TCGContext *s, int data,
                               int base, int index, int op)
{
    tcg_out32(s, op | INSN_T(data) | INSN_R1(index) | INSN_R2(base));
}

static inline void tcg_out_addi2(TCGContext *s, int ret, int arg1,
                                 tcg_target_long val)
{
    tcg_out_ldst(s, ret, arg1, val, INSN_LDO);
}

/* This function is required by tcg.c.  */
static inline void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
{
    tcg_out_addi2(s, reg, reg, val);
}

static inline void tcg_out_arith(TCGContext *s, int t, int r1, int r2, int op)
{
    tcg_out32(s, op | INSN_T(t) | INSN_R1(r1) | INSN_R2(r2));
}

static inline void tcg_out_arithi(TCGContext *s, int t, int r1,
                                  tcg_target_long val, int op)
{
    assert(check_fit_tl(val, 11));
    tcg_out32(s, op | INSN_R1(t) | INSN_R2(r1) | INSN_IM11(val));
}

static inline void tcg_out_nop(TCGContext *s)
{
    tcg_out_arith(s, TCG_REG_R0, TCG_REG_R0, TCG_REG_R0, INSN_OR);
}

static inline void tcg_out_mtctl_sar(TCGContext *s, int arg)
{
    tcg_out32(s, INSN_MTCTL | INSN_R2(11) | INSN_R1(arg));
}

/* Extract LEN bits at position OFS from ARG and place in RET.
   Note that here the bit ordering is reversed from the PA-RISC
   standard, such that the right-most bit is 0.  */
static inline void tcg_out_extr(TCGContext *s, int ret, int arg,
                                unsigned ofs, unsigned len, int sign)
{
    assert(ofs < 32 && len <= 32 - ofs);
    tcg_out32(s, (sign ? INSN_EXTRS : INSN_EXTRU)
              | INSN_R1(ret) | INSN_R2(arg)
              | INSN_SHDEP_P(31 - ofs) | INSN_DEP_LEN(len));
}

/* Likewise with OFS interpreted little-endian.  */
static inline void tcg_out_dep(TCGContext *s, int ret, int arg,
                               unsigned ofs, unsigned len)
{
    assert(ofs < 32 && len <= 32 - ofs);
    tcg_out32(s, INSN_DEP | INSN_R2(ret) | INSN_R1(arg)
              | INSN_SHDEP_CP(31 - ofs) | INSN_DEP_LEN(len));
}

static inline void tcg_out_shd(TCGContext *s, int ret, int hi, int lo,
                               unsigned count)
{
    assert(count < 32);
    tcg_out32(s, INSN_SHD | INSN_R1(hi) | INSN_R2(lo) | INSN_T(ret)
              | INSN_SHDEP_CP(count));
}

static void tcg_out_vshd(TCGContext *s, int ret, int hi, int lo, int creg)
{
    tcg_out_mtctl_sar(s, creg);
    tcg_out32(s, INSN_VSHD | INSN_T(ret) | INSN_R1(hi) | INSN_R2(lo));
}
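/* In C terms: tcg_out_extr computes ret = (arg >> ofs) & ((1 << len) - 1)
   (sign-extending for EXTRS), tcg_out_dep deposits the low LEN bits of
   ARG into RET at bit OFS, and tcg_out_shd computes
   ret = (hi << (32 - count)) | ((uint32_t)lo >> count), i.e. 32 bits
   extracted from the 64-bit concatenation hi:lo shifted right by COUNT.  */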

static void tcg_out_ori(TCGContext *s, int ret, int arg, tcg_target_ulong m)
{
    int bs0, bs1;

    /* Note that the argument is constrained to match or_mask_p.  */
    for (bs0 = 0; bs0 < 32; bs0++) {
        if ((m & (1u << bs0)) != 0) {
            break;
        }
    }
    for (bs1 = bs0; bs1 < 32; bs1++) {
        if ((m & (1u << bs1)) == 0) {
            break;
        }
    }
    assert(bs1 == 32 || (1ul << bs1) > m);

    tcg_out_mov(s, TCG_TYPE_I32, ret, arg);
    tcg_out32(s, INSN_DEPI | INSN_R2(ret) | INSN_IM5(-1)
              | INSN_SHDEP_CP(31 - bs0) | INSN_DEP_LEN(bs1 - bs0));
}

static void tcg_out_andi(TCGContext *s, int ret, int arg, tcg_target_ulong m)
{
    int ls0, ls1, ms0;

    /* Note that the argument is constrained to match and_mask_p.  */
    for (ls0 = 0; ls0 < 32; ls0++) {
        if ((m & (1u << ls0)) == 0) {
            break;
        }
    }
    for (ls1 = ls0; ls1 < 32; ls1++) {
        if ((m & (1u << ls1)) != 0) {
            break;
        }
    }
    for (ms0 = ls1; ms0 < 32; ms0++) {
        if ((m & (1u << ms0)) == 0) {
            break;
        }
    }
    assert(ms0 == 32);

    if (ls1 == 32) {
        tcg_out_extr(s, ret, arg, 0, ls0, 0);
    } else {
        tcg_out_mov(s, TCG_TYPE_I32, ret, arg);
        tcg_out32(s, INSN_DEPI | INSN_R2(ret) | INSN_IM5(0)
                  | INSN_SHDEP_CP(31 - ls0) | INSN_DEP_LEN(ls1 - ls0));
    }
}
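/* For example, m = 0x0000ffff takes the first path (ls0 = 16, ls1 = 32)
   and becomes a single EXTRU that zero-extends the low 16 bits, while
   m = 0xffff000f takes the second (ls0 = 4, ls1 = 16) and becomes a
   DEPI that deposits zeros over bits 4..15.  */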

static inline void tcg_out_ext8s(TCGContext *s, int ret, int arg)
{
    tcg_out_extr(s, ret, arg, 0, 8, 1);
}

static inline void tcg_out_ext16s(TCGContext *s, int ret, int arg)
{
    tcg_out_extr(s, ret, arg, 0, 16, 1);
}

static void tcg_out_shli(TCGContext *s, int ret, int arg, int count)
{
    count &= 31;
    tcg_out32(s, INSN_ZDEP | INSN_R2(ret) | INSN_R1(arg)
              | INSN_SHDEP_CP(31 - count) | INSN_DEP_LEN(32 - count));
}

static void tcg_out_shl(TCGContext *s, int ret, int arg, int creg)
{
    tcg_out_arithi(s, TCG_REG_R20, creg, 31, INSN_SUBI);
    tcg_out_mtctl_sar(s, TCG_REG_R20);
    tcg_out32(s, INSN_ZVDEP | INSN_R2(ret) | INSN_R1(arg) | INSN_DEP_LEN(32));
}

static void tcg_out_shri(TCGContext *s, int ret, int arg, int count)
{
    count &= 31;
    tcg_out_extr(s, ret, arg, count, 32 - count, 0);
}

static void tcg_out_shr(TCGContext *s, int ret, int arg, int creg)
{
    tcg_out_vshd(s, ret, TCG_REG_R0, arg, creg);
}

static void tcg_out_sari(TCGContext *s, int ret, int arg, int count)
{
    count &= 31;
    tcg_out_extr(s, ret, arg, count, 32 - count, 1);
}

static void tcg_out_sar(TCGContext *s, int ret, int arg, int creg)
{
    tcg_out_arithi(s, TCG_REG_R20, creg, 31, INSN_SUBI);
    tcg_out_mtctl_sar(s, TCG_REG_R20);
    tcg_out32(s, INSN_VEXTRS | INSN_R1(ret) | INSN_R2(arg) | INSN_DEP_LEN(32));
}

static void tcg_out_rotli(TCGContext *s, int ret, int arg, int count)
{
    count &= 31;
    tcg_out_shd(s, ret, arg, arg, 32 - count);
}

static void tcg_out_rotl(TCGContext *s, int ret, int arg, int creg)
{
    tcg_out_arithi(s, TCG_REG_R20, creg, 32, INSN_SUBI);
    tcg_out_vshd(s, ret, arg, arg, TCG_REG_R20);
}

static void tcg_out_rotri(TCGContext *s, int ret, int arg, int count)
{
    count &= 31;
    tcg_out_shd(s, ret, arg, arg, count);
}

static void tcg_out_rotr(TCGContext *s, int ret, int arg, int creg)
{
    tcg_out_vshd(s, ret, arg, arg, creg);
}
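/* With both inputs equal, the double shift is a rotate:
   shd(arg, arg, count) yields (arg << (32 - count)) | (arg >> count),
   a rotate right by COUNT; rotate left is done with 32 - count.
   E.g. rotating 0xaabbccdd right by 8 gives 0xddaabbcc.  */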

static void tcg_out_bswap16(TCGContext *s, int ret, int arg, int sign)
{
    if (ret != arg) {
        tcg_out_mov(s, TCG_TYPE_I32, ret, arg); /* arg =  xxAB */
    }
    tcg_out_dep(s, ret, ret, 16, 8);          /* ret =  xBAB */
    tcg_out_extr(s, ret, ret, 8, 16, sign);   /* ret =  ..BA */
}

static void tcg_out_bswap32(TCGContext *s, int ret, int arg, int temp)
{
                                          /* arg =  ABCD */
    tcg_out_rotri(s, temp, arg, 16);      /* temp = CDAB */
    tcg_out_dep(s, temp, temp, 16, 8);    /* temp = CBAB */
    tcg_out_shd(s, ret, arg, temp, 8);    /* ret =  DCBA */
}

static void tcg_out_call(TCGContext *s, void *func)
{
    tcg_target_long val, hi, lo, disp;

    val = (uint32_t)__canonicalize_funcptr_for_compare(func);
    disp = (val - ((tcg_target_long)s->code_ptr + 8)) >> 2;

    if (check_fit_tl(disp, 17)) {
        tcg_out32(s, INSN_BL_N | INSN_R2(TCG_REG_RP) | reassemble_17(disp));
    } else {
        hi = val >> 11;
        lo = val & 0x7ff;

        tcg_out32(s, INSN_LDIL | INSN_R2(TCG_REG_R20) | reassemble_21(hi));
        tcg_out32(s, INSN_BLE_SR4 | INSN_R2(TCG_REG_R20)
                  | reassemble_17(lo >> 2));
        tcg_out_mov(s, TCG_TYPE_I32, TCG_REG_RP, TCG_REG_R31);
    }
}
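/* When the callee is within range of the 17-bit displacement, a single
   BL,n suffices.  Otherwise LDIL builds the top 21 bits of the absolute
   address in R20 and BLE branches the remaining distance from it; BLE
   leaves its return address in R31, which is then copied into RP where
   the callee expects it.  */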

static void tcg_out_xmpyu(TCGContext *s, int retl, int reth,
                          int arg1, int arg2)
{
    /* Store both words into the stack for copy to the FPU.  */
    tcg_out_ldst(s, arg1, TCG_REG_CALL_STACK, STACK_TEMP_OFS, INSN_STW);
    tcg_out_ldst(s, arg2, TCG_REG_CALL_STACK, STACK_TEMP_OFS + 4, INSN_STW);

    /* Load both words into the FPU at the same time.  We get away
       with this because we can address the left and right half of the
       FPU registers individually once loaded.  */
    /* fldds stack_temp(sp),fr22 */
    tcg_out32(s, INSN_FLDDS | INSN_R2(TCG_REG_CALL_STACK)
              | INSN_IM5(STACK_TEMP_OFS) | INSN_T(22));

    /* xmpyu fr22r,fr22,fr22 */
    tcg_out32(s, 0x3ad64796);

    /* Store the 64-bit result back into the stack.  */
    /* fstds stack_temp(sp),fr22 */
    tcg_out32(s, INSN_FSTDS | INSN_R2(TCG_REG_CALL_STACK)
              | INSN_IM5(STACK_TEMP_OFS) | INSN_T(22));

    /* Load the pieces of the result that the caller requested.  */
    if (reth) {
        tcg_out_ldst(s, reth, TCG_REG_CALL_STACK, STACK_TEMP_OFS, INSN_LDW);
    }
    if (retl) {
        tcg_out_ldst(s, retl, TCG_REG_CALL_STACK, STACK_TEMP_OFS + 4,
                     INSN_LDW);
    }
}

static void tcg_out_add2(TCGContext *s, int destl, int desth,
                         int al, int ah, int bl, int bh, int blconst)
{
    int tmp = (destl == ah || destl == bh ? TCG_REG_R20 : destl);

    if (blconst) {
        tcg_out_arithi(s, tmp, al, bl, INSN_ADDI);
    } else {
        tcg_out_arith(s, tmp, al, bl, INSN_ADD);
    }
    tcg_out_arith(s, desth, ah, bh, INSN_ADDC);

    tcg_out_mov(s, TCG_TYPE_I32, destl, tmp);
}

static void tcg_out_sub2(TCGContext *s, int destl, int desth, int al, int ah,
                         int bl, int bh, int alconst, int blconst)
{
    int tmp = (destl == ah || destl == bh ? TCG_REG_R20 : destl);

    if (alconst) {
        if (blconst) {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_R20, bl);
            bl = TCG_REG_R20;
        }
        tcg_out_arithi(s, tmp, bl, al, INSN_SUBI);
    } else if (blconst) {
        tcg_out_arithi(s, tmp, al, -bl, INSN_ADDI);
    } else {
        tcg_out_arith(s, tmp, al, bl, INSN_SUB);
    }
    tcg_out_arith(s, desth, ah, bh, INSN_SUBB);

    tcg_out_mov(s, TCG_TYPE_I32, destl, tmp);
}

static void tcg_out_branch(TCGContext *s, int label_index, int nul)
{
    TCGLabel *l = &s->labels[label_index];
    uint32_t op = nul ? INSN_BL_N : INSN_BL;

    if (l->has_value) {
        tcg_target_long val = l->u.value;

        val -= (tcg_target_long)s->code_ptr + 8;
        val >>= 2;
        assert(check_fit_tl(val, 17));

        tcg_out32(s, op | reassemble_17(val));
    } else {
        /* We need to keep the offset unchanged for retranslation.  */
        uint32_t old_insn = *(uint32_t *)s->code_ptr;

        tcg_out_reloc(s, s->code_ptr, R_PARISC_PCREL17F, label_index, 0);
        tcg_out32(s, op | (old_insn & 0x1f1ffdu));
    }
}

static const uint8_t tcg_cond_to_cmp_cond[10] =
{
    [TCG_COND_EQ] = COND_EQ,
    [TCG_COND_NE] = COND_EQ | COND_FALSE,
    [TCG_COND_LT] = COND_LT,
    [TCG_COND_GE] = COND_LT | COND_FALSE,
    [TCG_COND_LE] = COND_LE,
    [TCG_COND_GT] = COND_LE | COND_FALSE,
    [TCG_COND_LTU] = COND_LTU,
    [TCG_COND_GEU] = COND_LTU | COND_FALSE,
    [TCG_COND_LEU] = COND_LEU,
    [TCG_COND_GTU] = COND_LEU | COND_FALSE,
};
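/* PA-RISC comparisons provide only EQ/LT/LE (plus unsigned variants)
   directly; COND_FALSE selects the negated form of the instruction
   instead (e.g. COMBF rather than COMBT, or the "f" bit of COMCLR).
   Thus TCG_COND_NE is encoded as "equal is false".  */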

static void tcg_out_brcond(TCGContext *s, int cond, TCGArg c1,
                           TCGArg c2, int c2const, int label_index)
{
    TCGLabel *l = &s->labels[label_index];
    int op, pacond;

    /* Note that COMIB operates as if the immediate is the first
       operand.  We model brcond with the immediate in the second
       to better match what targets are likely to give us.  For
       consistency, model COMB with reversed operands as well.  */
    pacond = tcg_cond_to_cmp_cond[tcg_swap_cond(cond)];

    if (c2const) {
        op = (pacond & COND_FALSE ? INSN_COMIBF : INSN_COMIBT);
        op |= INSN_IM5(c2);
    } else {
        op = (pacond & COND_FALSE ? INSN_COMBF : INSN_COMBT);
        op |= INSN_R1(c2);
    }
    op |= INSN_R2(c1);
    op |= INSN_COND(pacond & 7);

    if (l->has_value) {
        tcg_target_long val = l->u.value;

        val -= (tcg_target_long)s->code_ptr + 8;
        val >>= 2;
        assert(check_fit_tl(val, 12));

        /* ??? Assume that all branches to defined labels are backward.
           Which means that if the nul bit is set, the delay slot is
           executed if the branch is taken, and not executed in fallthru.  */
        tcg_out32(s, op | reassemble_12(val));
        tcg_out_nop(s);
    } else {
        /* We need to keep the offset unchanged for retranslation.  */
        uint32_t old_insn = *(uint32_t *)s->code_ptr;

        tcg_out_reloc(s, s->code_ptr, R_PARISC_PCREL12F, label_index, 0);
        /* ??? Assume that all branches to undefined labels are forward.
           Which means that if the nul bit is set, the delay slot is
           not executed if the branch is taken, which is what we want.  */
        tcg_out32(s, op | 2 | (old_insn & 0x1ffdu));
    }
}

static void tcg_out_comclr(TCGContext *s, int cond, TCGArg ret,
                           TCGArg c1, TCGArg c2, int c2const)
{
    int op, pacond;

    /* Note that COMICLR operates as if the immediate is the first
       operand.  We model setcond with the immediate in the second
       to better match what targets are likely to give us.  For
       consistency, model COMCLR with reversed operands as well.  */
    pacond = tcg_cond_to_cmp_cond[tcg_swap_cond(cond)];

    if (c2const) {
        op = INSN_COMICLR | INSN_R2(c1) | INSN_R1(ret) | INSN_IM11(c2);
    } else {
        op = INSN_COMCLR | INSN_R2(c1) | INSN_R1(c2) | INSN_T(ret);
    }
    op |= INSN_COND(pacond & 7);
    op |= pacond & COND_FALSE ? 1 << 12 : 0;

    tcg_out32(s, op);
}

static void tcg_out_brcond2(TCGContext *s, int cond, TCGArg al, TCGArg ah,
                            TCGArg bl, int blconst, TCGArg bh, int bhconst,
                            int label_index)
{
    switch (cond) {
    case TCG_COND_EQ:
        /* 64-bit EQ requires both halves equal: nullify the branch on
           the high parts when the low parts differ.  */
        tcg_out_comclr(s, TCG_COND_NE, TCG_REG_R0, al, bl, blconst);
        tcg_out_brcond(s, cond, ah, bh, bhconst, label_index);
        break;

    case TCG_COND_NE:
        /* 64-bit NE holds if either half differs; test each half
           in turn.  */
        tcg_out_brcond(s, cond, al, bl, blconst, label_index);
        tcg_out_brcond(s, cond, ah, bh, bhconst, label_index);
        break;

    default:
        tcg_out_brcond(s, cond, ah, bh, bhconst, label_index);
        tcg_out_comclr(s, TCG_COND_NE, TCG_REG_R0, ah, bh, bhconst);
        tcg_out_brcond(s, tcg_unsigned_cond(cond),
                       al, bl, blconst, label_index);
        break;
    }
}

static void tcg_out_setcond(TCGContext *s, int cond, TCGArg ret,
                            TCGArg c1, TCGArg c2, int c2const)
{
    tcg_out_comclr(s, tcg_invert_cond(cond), ret, c1, c2, c2const);
    tcg_out_movi(s, TCG_TYPE_I32, ret, 1);
}

static void tcg_out_setcond2(TCGContext *s, int cond, TCGArg ret,
                             TCGArg al, TCGArg ah, TCGArg bl, int blconst,
                             TCGArg bh, int bhconst)
{
    int scratch = TCG_REG_R20;

    if (ret != al && ret != ah
        && (blconst || ret != bl)
        && (bhconst || ret != bh)) {
        scratch = ret;
    }

    switch (cond) {
    case TCG_COND_EQ:
    case TCG_COND_NE:
        tcg_out_setcond(s, cond, scratch, al, bl, blconst);
        tcg_out_comclr(s, TCG_COND_EQ, TCG_REG_R0, ah, bh, bhconst);
        tcg_out_movi(s, TCG_TYPE_I32, scratch, cond == TCG_COND_NE);
        break;

    default:
        tcg_out_setcond(s, tcg_unsigned_cond(cond), scratch, al, bl, blconst);
        tcg_out_comclr(s, TCG_COND_EQ, TCG_REG_R0, ah, bh, bhconst);
        tcg_out_movi(s, TCG_TYPE_I32, scratch, 0);
        tcg_out_comclr(s, cond, TCG_REG_R0, ah, bh, bhconst);
        tcg_out_movi(s, TCG_TYPE_I32, scratch, 1);
        break;
    }

    tcg_out_mov(s, TCG_TYPE_I32, ret, scratch);
}
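/* The setcond routines lean on COMCLR's nullification: it zeroes its
   target and skips the immediately following instruction when its
   condition holds.  In tcg_out_setcond, RET is cleared and the
   "movi ret, 1" survives only when the inverted condition fails,
   i.e. exactly when the requested condition is true.  */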

#if defined(CONFIG_SOFTMMU)
#include "../../softmmu_defs.h"

static void *qemu_ld_helpers[4] = {
    __ldb_mmu,
    __ldw_mmu,
    __ldl_mmu,
    __ldq_mmu,
};

static void *qemu_st_helpers[4] = {
    __stb_mmu,
    __stw_mmu,
    __stl_mmu,
    __stq_mmu,
};

/* Load and compare a TLB entry, and branch if TLB miss.  OFFSET is set to
   the offset of the first ADDR_READ or ADDR_WRITE member of the appropriate
   TLB for the memory index.  The return value is the offset from ENV
   contained in R1 afterward (to be used when loading ADDEND); if the
   return value is 0, R1 is not used.  */

static int tcg_out_tlb_read(TCGContext *s, int r0, int r1, int addrlo,
                            int addrhi, int s_bits, int lab_miss, int offset)
{
    int ret;

    /* Extracting the index into the TLB.  The "normal C operation" is
          r1 = addr_reg >> TARGET_PAGE_BITS;
          r1 &= CPU_TLB_SIZE - 1;
          r1 <<= CPU_TLB_ENTRY_BITS;
       What this does is extract CPU_TLB_BITS beginning at TARGET_PAGE_BITS
       and place them at CPU_TLB_ENTRY_BITS.  We can combine the first two
       operations with an EXTRU.  Unfortunately, the current value of
       CPU_TLB_ENTRY_BITS is > 3, so we can't merge that shift with the
       add that follows.  */
    tcg_out_extr(s, r1, addrlo, TARGET_PAGE_BITS, CPU_TLB_BITS, 0);
    tcg_out_shli(s, r1, r1, CPU_TLB_ENTRY_BITS);
    tcg_out_arith(s, r1, r1, TCG_AREG0, INSN_ADDL);

    /* Make sure that both the addr_{read,write} and addend can be
       read with a 14-bit offset from the same base register.  */
    if (check_fit_tl(offset + CPU_TLB_SIZE, 14)) {
        ret = 0;
    } else {
        ret = (offset + 0x400) & ~0x7ff;
        offset -= ret;
        tcg_out_addi2(s, TCG_REG_R1, r1, ret);
        r1 = TCG_REG_R1;
    }

    /* Load the entry from the computed slot.  */
    if (TARGET_LONG_BITS == 64) {
        tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_R23, r1, offset);
        tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_R20, r1, offset + 4);
    } else {
        tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_R20, r1, offset);
    }

    /* Compute the value that ought to appear in the TLB for a hit,
       namely, the page of the address.  We include the low N bits of
       the address to catch unaligned accesses and force them onto the
       slow path.  Do this computation after having issued the load
       from the TLB slot to give the load time to complete.  */
    tcg_out_andi(s, r0, addrlo, TARGET_PAGE_MASK | ((1 << s_bits) - 1));

    /* If not equal, jump to lab_miss.  */
    if (TARGET_LONG_BITS == 64) {
        tcg_out_brcond2(s, TCG_COND_NE, TCG_REG_R20, TCG_REG_R23,
                        r0, 0, addrhi, 0, lab_miss);
    } else {
        tcg_out_brcond(s, TCG_COND_NE, TCG_REG_R20, r0, 0, lab_miss);
    }

    return ret;
}
#endif
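/* The rounding step in tcg_out_tlb_read picks RET as the multiple of
   0x800 nearest to OFFSET, leaving a residual displacement in the range
   -0x400..0x3ff.  E.g. offset = 0x1234 rounds to ret = 0x1000 with
   residual 0x234, comfortably within LDW's 14-bit displacement.  */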

static void tcg_out_qemu_ld_direct(TCGContext *s, int datalo_reg,
                                   int datahi_reg, int addr_reg,
                                   int addend_reg, int opc)
{
#ifdef TARGET_WORDS_BIGENDIAN
    const int bswap = 0;
#else
    const int bswap = 1;
#endif

    switch (opc) {
    case 0:
        tcg_out_ldst_index(s, datalo_reg, addr_reg, addend_reg, INSN_LDBX);
        break;
    case 0 | 4:
        tcg_out_ldst_index(s, datalo_reg, addr_reg, addend_reg, INSN_LDBX);
        tcg_out_ext8s(s, datalo_reg, datalo_reg);
        break;
    case 1:
        tcg_out_ldst_index(s, datalo_reg, addr_reg, addend_reg, INSN_LDHX);
        if (bswap) {
            tcg_out_bswap16(s, datalo_reg, datalo_reg, 0);
        }
        break;
    case 1 | 4:
        tcg_out_ldst_index(s, datalo_reg, addr_reg, addend_reg, INSN_LDHX);
        if (bswap) {
            tcg_out_bswap16(s, datalo_reg, datalo_reg, 1);
        } else {
            tcg_out_ext16s(s, datalo_reg, datalo_reg);
        }
        break;
    case 2:
        tcg_out_ldst_index(s, datalo_reg, addr_reg, addend_reg, INSN_LDWX);
        if (bswap) {
            tcg_out_bswap32(s, datalo_reg, datalo_reg, TCG_REG_R20);
        }
        break;
    case 3:
        if (bswap) {
            int t = datahi_reg;
            datahi_reg = datalo_reg;
            datalo_reg = t;
        }
        /* We can't access the low-part with a reg+reg addressing mode,
           so perform the addition now and use reg_ofs addressing mode.  */
        if (addend_reg != TCG_REG_R0) {
            tcg_out_arith(s, TCG_REG_R20, addr_reg, addend_reg, INSN_ADD);
            addr_reg = TCG_REG_R20;
        }
        /* Make sure not to clobber the base register.  */
        if (datahi_reg == addr_reg) {
            tcg_out_ldst(s, datalo_reg, addr_reg, 4, INSN_LDW);
            tcg_out_ldst(s, datahi_reg, addr_reg, 0, INSN_LDW);
        } else {
            tcg_out_ldst(s, datahi_reg, addr_reg, 0, INSN_LDW);
            tcg_out_ldst(s, datalo_reg, addr_reg, 4, INSN_LDW);
        }
        if (bswap) {
            tcg_out_bswap32(s, datalo_reg, datalo_reg, TCG_REG_R20);
            tcg_out_bswap32(s, datahi_reg, datahi_reg, TCG_REG_R20);
        }
        break;
    default:
        tcg_abort();
    }
}

static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args, int opc)
{
    int datalo_reg = *args++;
    /* Note that datahi_reg is only used for 64-bit loads.  */
    int datahi_reg = (opc == 3 ? *args++ : TCG_REG_R0);
    int addrlo_reg = *args++;

#if defined(CONFIG_SOFTMMU)
    /* Note that addrhi_reg is only used for 64-bit guests.  */
    int addrhi_reg = (TARGET_LONG_BITS == 64 ? *args++ : TCG_REG_R0);
    int mem_index = *args;
    int lab1, lab2, argreg, offset;

    lab1 = gen_new_label();
    lab2 = gen_new_label();

    offset = offsetof(CPUState, tlb_table[mem_index][0].addr_read);
    offset = tcg_out_tlb_read(s, TCG_REG_R26, TCG_REG_R25, addrlo_reg,
                              addrhi_reg, opc & 3, lab1, offset);

    /* TLB Hit.  */
    tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_R20,
               (offset ? TCG_REG_R1 : TCG_REG_R25),
               offsetof(CPUState, tlb_table[mem_index][0].addend) - offset);
    tcg_out_qemu_ld_direct(s, datalo_reg, datahi_reg, addrlo_reg,
                           TCG_REG_R20, opc);
    tcg_out_branch(s, lab2, 1);

    /* TLB Miss.  */
    /* label1: */
    tcg_out_label(s, lab1, (tcg_target_long)s->code_ptr);

    argreg = TCG_REG_R26;
    tcg_out_mov(s, TCG_TYPE_I32, argreg--, addrlo_reg);
    if (TARGET_LONG_BITS == 64) {
        tcg_out_mov(s, TCG_TYPE_I32, argreg--, addrhi_reg);
    }
    tcg_out_movi(s, TCG_TYPE_I32, argreg, mem_index);

    tcg_out_call(s, qemu_ld_helpers[opc & 3]);

    switch (opc) {
    case 0:
        tcg_out_andi(s, datalo_reg, TCG_REG_RET0, 0xff);
        break;
    case 0 | 4:
        tcg_out_ext8s(s, datalo_reg, TCG_REG_RET0);
        break;
    case 1:
        tcg_out_andi(s, datalo_reg, TCG_REG_RET0, 0xffff);
        break;
    case 1 | 4:
        tcg_out_ext16s(s, datalo_reg, TCG_REG_RET0);
        break;
    case 2:
    case 2 | 4:
        tcg_out_mov(s, TCG_TYPE_I32, datalo_reg, TCG_REG_RET0);
        break;
    case 3:
        tcg_out_mov(s, TCG_TYPE_I32, datahi_reg, TCG_REG_RET0);
        tcg_out_mov(s, TCG_TYPE_I32, datalo_reg, TCG_REG_RET1);
        break;
    default:
        tcg_abort();
    }

    /* label2: */
    tcg_out_label(s, lab2, (tcg_target_long)s->code_ptr);
#else
    tcg_out_qemu_ld_direct(s, datalo_reg, datahi_reg, addrlo_reg,
                           (GUEST_BASE ? TCG_GUEST_BASE_REG : TCG_REG_R0), opc);
#endif
}

static void tcg_out_qemu_st_direct(TCGContext *s, int datalo_reg,
                                   int datahi_reg, int addr_reg, int opc)
{
#ifdef TARGET_WORDS_BIGENDIAN
    const int bswap = 0;
#else
    const int bswap = 1;
#endif

    switch (opc) {
    case 0:
        tcg_out_ldst(s, datalo_reg, addr_reg, 0, INSN_STB);
        break;
    case 1:
        if (bswap) {
            tcg_out_bswap16(s, TCG_REG_R20, datalo_reg, 0);
            datalo_reg = TCG_REG_R20;
        }
        tcg_out_ldst(s, datalo_reg, addr_reg, 0, INSN_STH);
        break;
    case 2:
        if (bswap) {
            tcg_out_bswap32(s, TCG_REG_R20, datalo_reg, TCG_REG_R20);
            datalo_reg = TCG_REG_R20;
        }
        tcg_out_ldst(s, datalo_reg, addr_reg, 0, INSN_STW);
        break;
    case 3:
        if (bswap) {
            tcg_out_bswap32(s, TCG_REG_R20, datalo_reg, TCG_REG_R20);
            tcg_out_bswap32(s, TCG_REG_R23, datahi_reg, TCG_REG_R23);
            datahi_reg = TCG_REG_R20;
            datalo_reg = TCG_REG_R23;
        }
        tcg_out_ldst(s, datahi_reg, addr_reg, 0, INSN_STW);
        tcg_out_ldst(s, datalo_reg, addr_reg, 4, INSN_STW);
        break;
    default:
        tcg_abort();
    }
}

static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args, int opc)
{
    int datalo_reg = *args++;
    /* Note that datahi_reg is only used for 64-bit stores.  */
    int datahi_reg = (opc == 3 ? *args++ : TCG_REG_R0);
    int addrlo_reg = *args++;

#if defined(CONFIG_SOFTMMU)
    /* Note that addrhi_reg is only used for 64-bit guests.  */
    int addrhi_reg = (TARGET_LONG_BITS == 64 ? *args++ : TCG_REG_R0);
    int mem_index = *args;
    int lab1, lab2, argreg, offset;

    lab1 = gen_new_label();
    lab2 = gen_new_label();

    offset = offsetof(CPUState, tlb_table[mem_index][0].addr_write);
    offset = tcg_out_tlb_read(s, TCG_REG_R26, TCG_REG_R25, addrlo_reg,
                              addrhi_reg, opc, lab1, offset);

    /* TLB Hit.  */
    tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_R20,
               (offset ? TCG_REG_R1 : TCG_REG_R25),
               offsetof(CPUState, tlb_table[mem_index][0].addend) - offset);

    /* There are no indexed stores, so we must do this addition explicitly.
       Careful to avoid R20, which is used for the bswaps to follow.  */
    tcg_out_arith(s, TCG_REG_R31, addrlo_reg, TCG_REG_R20, INSN_ADDL);
    tcg_out_qemu_st_direct(s, datalo_reg, datahi_reg, TCG_REG_R31, opc);
    tcg_out_branch(s, lab2, 1);

    /* TLB Miss.  */
    /* label1: */
    tcg_out_label(s, lab1, (tcg_target_long)s->code_ptr);

    argreg = TCG_REG_R26;
    tcg_out_mov(s, TCG_TYPE_I32, argreg--, addrlo_reg);
    if (TARGET_LONG_BITS == 64) {
        tcg_out_mov(s, TCG_TYPE_I32, argreg--, addrhi_reg);
    }

    switch (opc) {
    case 0:
        tcg_out_andi(s, argreg--, datalo_reg, 0xff);
        tcg_out_movi(s, TCG_TYPE_I32, argreg, mem_index);
        break;
    case 1:
        tcg_out_andi(s, argreg--, datalo_reg, 0xffff);
        tcg_out_movi(s, TCG_TYPE_I32, argreg, mem_index);
        break;
    case 2:
        tcg_out_mov(s, TCG_TYPE_I32, argreg--, datalo_reg);
        tcg_out_movi(s, TCG_TYPE_I32, argreg, mem_index);
        break;
    case 3:
        /* Because of the alignment required by the 64-bit data argument,
           we will always use R23/R24.  Also, we will always run out of
           argument registers for storing mem_index, so that will have
           to go on the stack.  */
        if (mem_index == 0) {
            argreg = TCG_REG_R0;
        } else {
            argreg = TCG_REG_R20;
            tcg_out_movi(s, TCG_TYPE_I32, argreg, mem_index);
        }
        tcg_out_mov(s, TCG_TYPE_I32, TCG_REG_R23, datahi_reg);
        tcg_out_mov(s, TCG_TYPE_I32, TCG_REG_R24, datalo_reg);
        tcg_out_st(s, TCG_TYPE_I32, argreg, TCG_REG_CALL_STACK,
                   TCG_TARGET_CALL_STACK_OFFSET - 4);
        break;
    default:
        tcg_abort();
    }

    tcg_out_call(s, qemu_st_helpers[opc]);

    /* label2: */
    tcg_out_label(s, lab2, (tcg_target_long)s->code_ptr);
#else
    /* There are no indexed stores, so if GUEST_BASE is set we must do
       the add explicitly.  Careful to avoid R20, which is used for the
       bswaps to follow.  */
    if (GUEST_BASE != 0) {
        tcg_out_arith(s, TCG_REG_R31, addrlo_reg, TCG_GUEST_BASE_REG,
                      INSN_ADDL);
        addrlo_reg = TCG_REG_R31;
    }
    tcg_out_qemu_st_direct(s, datalo_reg, datahi_reg, addrlo_reg, opc);
#endif
}

static void tcg_out_exit_tb(TCGContext *s, TCGArg arg)
{
    if (!check_fit_tl(arg, 14)) {
        uint32_t hi, lo;
        hi = arg & ~0x7ff;
        lo = arg & 0x7ff;
        if (lo) {
            tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_RET0, hi);
            tcg_out32(s, INSN_BV | INSN_R2(TCG_REG_R18));
            tcg_out_addi(s, TCG_REG_RET0, lo);
            return;
        }
        arg = hi;
    }
    tcg_out32(s, INSN_BV | INSN_R2(TCG_REG_R18));
    tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_RET0, arg);
}
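/* Note the scheduling in tcg_out_exit_tb: BV is emitted without the
   nullify bit, so the instruction issued after it (the ADDI or MOVI
   that completes RET0) executes in the branch delay slot on the way
   back to the epilogue pointed to by R18.  */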

static void tcg_out_goto_tb(TCGContext *s, TCGArg arg)
{
    if (s->tb_jmp_offset) {
        /* direct jump method */
        fprintf(stderr, "goto_tb direct\n");
        tcg_abort();
    } else {
        /* indirect jump method */
        tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_R20, TCG_REG_R0,
                   (tcg_target_long)(s->tb_next + arg));
        tcg_out32(s, INSN_BV_N | INSN_R2(TCG_REG_R20));
    }
    s->tb_next_offset[arg] = s->code_ptr - s->code_buf;
}

static inline void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                              const int *const_args)
{
    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out_exit_tb(s, args[0]);
        break;
    case INDEX_op_goto_tb:
        tcg_out_goto_tb(s, args[0]);
        break;

    case INDEX_op_call:
        if (const_args[0]) {
            tcg_out_call(s, (void *)args[0]);
        } else {
            /* ??? FIXME: the value in the register in args[0] is almost
               certainly a procedure descriptor, not a code address.  We
               probably need to use the millicode $$dyncall routine.  */
            tcg_abort();
        }
        break;

    case INDEX_op_jmp:
        fprintf(stderr, "unimplemented jmp\n");
        tcg_abort();
        break;

    case INDEX_op_br:
        tcg_out_branch(s, args[0], 1);
        break;

    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], (uint32_t)args[1]);
        break;

    case INDEX_op_ld8u_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], INSN_LDB);
        break;
    case INDEX_op_ld8s_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], INSN_LDB);
        tcg_out_ext8s(s, args[0], args[0]);
        break;
    case INDEX_op_ld16u_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], INSN_LDH);
        break;
    case INDEX_op_ld16s_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], INSN_LDH);
        tcg_out_ext16s(s, args[0], args[0]);
        break;
    case INDEX_op_ld_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], INSN_LDW);
        break;

    case INDEX_op_st8_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], INSN_STB);
        break;
    case INDEX_op_st16_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], INSN_STH);
        break;
    case INDEX_op_st_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], INSN_STW);
        break;

    case INDEX_op_add_i32:
        if (const_args[2]) {
            tcg_out_addi2(s, args[0], args[1], args[2]);
        } else {
            tcg_out_arith(s, args[0], args[1], args[2], INSN_ADDL);
        }
        break;

    case INDEX_op_sub_i32:
        if (const_args[1]) {
            if (const_args[2]) {
                tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1] - args[2]);
            } else {
                /* Recall that SUBI is a reversed subtract.  */
                tcg_out_arithi(s, args[0], args[2], args[1], INSN_SUBI);
            }
        } else if (const_args[2]) {
            tcg_out_addi2(s, args[0], args[1], -args[2]);
        } else {
            tcg_out_arith(s, args[0], args[1], args[2], INSN_SUB);
        }
        break;

    case INDEX_op_and_i32:
        if (const_args[2]) {
            tcg_out_andi(s, args[0], args[1], args[2]);
        } else {
            tcg_out_arith(s, args[0], args[1], args[2], INSN_AND);
        }
        break;

    case INDEX_op_or_i32:
        if (const_args[2]) {
            tcg_out_ori(s, args[0], args[1], args[2]);
        } else {
            tcg_out_arith(s, args[0], args[1], args[2], INSN_OR);
        }
        break;

    case INDEX_op_xor_i32:
        tcg_out_arith(s, args[0], args[1], args[2], INSN_XOR);
        break;

    case INDEX_op_andc_i32:
        if (const_args[2]) {
            tcg_out_andi(s, args[0], args[1], ~args[2]);
        } else {
            tcg_out_arith(s, args[0], args[1], args[2], INSN_ANDCM);
        }
        break;

    case INDEX_op_shl_i32:
        if (const_args[2]) {
            tcg_out_shli(s, args[0], args[1], args[2]);
        } else {
            tcg_out_shl(s, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_shr_i32:
        if (const_args[2]) {
            tcg_out_shri(s, args[0], args[1], args[2]);
        } else {
            tcg_out_shr(s, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_sar_i32:
        if (const_args[2]) {
            tcg_out_sari(s, args[0], args[1], args[2]);
        } else {
            tcg_out_sar(s, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_rotl_i32:
        if (const_args[2]) {
            tcg_out_rotli(s, args[0], args[1], args[2]);
        } else {
            tcg_out_rotl(s, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_rotr_i32:
        if (const_args[2]) {
            tcg_out_rotri(s, args[0], args[1], args[2]);
        } else {
            tcg_out_rotr(s, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_mul_i32:
        tcg_out_xmpyu(s, args[0], TCG_REG_R0, args[1], args[2]);
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_xmpyu(s, args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_bswap16_i32:
        tcg_out_bswap16(s, args[0], args[1], 0);
        break;
    case INDEX_op_bswap32_i32:
        tcg_out_bswap32(s, args[0], args[1], TCG_REG_R20);
        break;

    case INDEX_op_not_i32:
        /* ~arg == -1 - arg, so NOT reduces to a reversed subtract.  */
        tcg_out_arithi(s, args[0], args[1], -1, INSN_SUBI);
        break;
    case INDEX_op_ext8s_i32:
        tcg_out_ext8s(s, args[0], args[1]);
        break;
    case INDEX_op_ext16s_i32:
        tcg_out_ext16s(s, args[0], args[1]);
        break;

    case INDEX_op_brcond_i32:
        tcg_out_brcond(s, args[2], args[0], args[1], const_args[1], args[3]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_brcond2(s, args[4], args[0], args[1],
                        args[2], const_args[2],
                        args[3], const_args[3], args[5]);
        break;

    case INDEX_op_setcond_i32:
        tcg_out_setcond(s, args[3], args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_setcond2_i32:
        tcg_out_setcond2(s, args[5], args[0], args[1], args[2],
                         args[3], const_args[3], args[4], const_args[4]);
        break;

    case INDEX_op_add2_i32:
        tcg_out_add2(s, args[0], args[1], args[2], args[3],
                     args[4], args[5], const_args[4]);
        break;

    case INDEX_op_sub2_i32:
        tcg_out_sub2(s, args[0], args[1], args[2], args[3],
                     args[4], args[5], const_args[2], const_args[4]);
        break;

    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
        break;
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
        break;
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
        break;
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
        break;
    case INDEX_op_qemu_ld32:
        tcg_out_qemu_ld(s, args, 2);
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);
        break;

    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
        break;
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
        break;
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
        break;
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);
        break;

    default:
        fprintf(stderr, "unknown opcode 0x%x\n", opc);
        tcg_abort();
    }
}

static const TCGTargetOpDef hppa_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },

    { INDEX_op_call, { "ri" } },
    { INDEX_op_jmp, { "r" } },
    { INDEX_op_br, { } },

    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },

    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "rZ", "r" } },
    { INDEX_op_st16_i32, { "rZ", "r" } },
    { INDEX_op_st_i32, { "rZ", "r" } },

    { INDEX_op_add_i32, { "r", "rZ", "ri" } },
    { INDEX_op_sub_i32, { "r", "rI", "ri" } },
    { INDEX_op_and_i32, { "r", "rZ", "rM" } },
    { INDEX_op_or_i32, { "r", "rZ", "rO" } },
    { INDEX_op_xor_i32, { "r", "rZ", "rZ" } },
    /* Note that the second argument will be inverted, which means
       we want a constant whose inversion matches M, and that O = ~M.
       See the implementation of and_mask_p.  */
    { INDEX_op_andc_i32, { "r", "rZ", "rO" } },

    { INDEX_op_mul_i32, { "r", "r", "r" } },
    { INDEX_op_mulu2_i32, { "r", "r", "r", "r" } },

    { INDEX_op_shl_i32, { "r", "r", "ri" } },
    { INDEX_op_shr_i32, { "r", "r", "ri" } },
    { INDEX_op_sar_i32, { "r", "r", "ri" } },
    { INDEX_op_rotl_i32, { "r", "r", "ri" } },
    { INDEX_op_rotr_i32, { "r", "r", "ri" } },

    { INDEX_op_bswap16_i32, { "r", "r" } },
    { INDEX_op_bswap32_i32, { "r", "r" } },
    { INDEX_op_not_i32, { "r", "r" } },

    { INDEX_op_ext8s_i32, { "r", "r" } },
    { INDEX_op_ext16s_i32, { "r", "r" } },

    { INDEX_op_brcond_i32, { "rZ", "rJ" } },
    { INDEX_op_brcond2_i32, { "rZ", "rZ", "rJ", "rJ" } },

    { INDEX_op_setcond_i32, { "r", "rZ", "rI" } },
    { INDEX_op_setcond2_i32, { "r", "rZ", "rZ", "rI", "rI" } },

    { INDEX_op_add2_i32, { "r", "r", "rZ", "rZ", "rI", "rZ" } },
    { INDEX_op_sub2_i32, { "r", "r", "rI", "rZ", "rK", "rZ" } },

#if TARGET_LONG_BITS == 32
    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32, { "r", "L" } },
    { INDEX_op_qemu_ld64, { "r", "r", "L" } },

    { INDEX_op_qemu_st8, { "LZ", "L" } },
    { INDEX_op_qemu_st16, { "LZ", "L" } },
    { INDEX_op_qemu_st32, { "LZ", "L" } },
    { INDEX_op_qemu_st64, { "LZ", "LZ", "L" } },
#else
    { INDEX_op_qemu_ld8u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld32, { "r", "L", "L" } },
    { INDEX_op_qemu_ld64, { "r", "r", "L", "L" } },

    { INDEX_op_qemu_st8, { "LZ", "L", "L" } },
    { INDEX_op_qemu_st16, { "LZ", "L", "L" } },
    { INDEX_op_qemu_st32, { "LZ", "L", "L" } },
    { INDEX_op_qemu_st64, { "LZ", "LZ", "L", "L" } },
#endif
    { -1 },
};

static int tcg_target_callee_save_regs[] = {
    /* R2, the return address register, is saved specially
       in the caller's frame.  */
    /* R3, the frame pointer, is not currently modified.  */
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    TCG_REG_R16,
    TCG_REG_R17, /* R17 is the global env.  */
    TCG_REG_R18
};
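/* The prologue below relies on PA-RISC's base-modifying memory ops:
   STWM with a positive displacement stores at the old stack pointer and
   then advances it by frame_size, allocating the frame and saving the
   first callee-saved register in a single instruction; LDWM with a
   negative displacement undoes this in the epilogue.  */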

static void tcg_target_qemu_prologue(TCGContext *s)
{
    int frame_size, i;

    /* Allocate space for the fixed frame marker.  */
    frame_size = -TCG_TARGET_CALL_STACK_OFFSET;
    frame_size += TCG_TARGET_STATIC_CALL_ARGS_SIZE;

    /* Allocate space for the saved registers.  */
    frame_size += ARRAY_SIZE(tcg_target_callee_save_regs) * 4;

    /* Align the allocated space.  */
    frame_size = ((frame_size + TCG_TARGET_STACK_ALIGN - 1)
                  & -TCG_TARGET_STACK_ALIGN);

    /* The return address is stored in the caller's frame.  */
    tcg_out_st(s, TCG_TYPE_PTR, TCG_REG_RP, TCG_REG_CALL_STACK, -20);

    /* Allocate stack frame, saving the first register at the same time.  */
    tcg_out_ldst(s, tcg_target_callee_save_regs[0],
                 TCG_REG_CALL_STACK, frame_size, INSN_STWM);

    /* Save all callee saved registers.  */
    for (i = 1; i < ARRAY_SIZE(tcg_target_callee_save_regs); i++) {
        tcg_out_st(s, TCG_TYPE_PTR, tcg_target_callee_save_regs[i],
                   TCG_REG_CALL_STACK, -frame_size + i * 4);
    }

#ifdef CONFIG_USE_GUEST_BASE
    if (GUEST_BASE != 0) {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_GUEST_BASE_REG, GUEST_BASE);
        tcg_regset_set_reg(s->reserved_regs, TCG_GUEST_BASE_REG);
    }
#endif

    tcg_out_mov(s, TCG_TYPE_PTR, TCG_AREG0, tcg_target_call_iarg_regs[0]);

    /* Jump to TB, and adjust R18 to be the return address.  */
    tcg_out32(s, INSN_BLE_SR4 | INSN_R2(tcg_target_call_iarg_regs[1]));
    tcg_out_mov(s, TCG_TYPE_I32, TCG_REG_R18, TCG_REG_R31);

    /* Restore callee saved registers.  */
    tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_RP, TCG_REG_CALL_STACK,
               -frame_size - 20);
    for (i = 1; i < ARRAY_SIZE(tcg_target_callee_save_regs); i++) {
        tcg_out_ld(s, TCG_TYPE_PTR, tcg_target_callee_save_regs[i],
                   TCG_REG_CALL_STACK, -frame_size + i * 4);
    }

    /* Deallocate stack frame and return.  */
    tcg_out32(s, INSN_BV | INSN_R2(TCG_REG_RP));
    tcg_out_ldst(s, tcg_target_callee_save_regs[0],
                 TCG_REG_CALL_STACK, -frame_size, INSN_LDWM);
}

static void tcg_target_init(TCGContext *s)
{
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffffffff);

    tcg_regset_clear(tcg_target_call_clobber_regs);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R20);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R21);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R22);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R23);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R24);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R25);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R26);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_RET0);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_RET1);

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R0);  /* hardwired to zero */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R1);  /* addil target */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_RP);  /* link register */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R3);  /* frame pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R18); /* return pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R19); /* clobbered w/o pic */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R20); /* reserved */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_DP);  /* data pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);  /* stack pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R31); /* ble link reg */

    tcg_add_target_add_op_defs(hppa_op_defs);
    tcg_set_frame(s, TCG_AREG0, offsetof(CPUState, temp_buf),
                  CPU_TEMP_BUF_NLONGS * sizeof(long));
}