root / tcg / i386 / tcg-target.c @ bbc863bf

/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#ifndef NDEBUG
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
#if TCG_TARGET_REG_BITS == 64
    "%rax", "%rcx", "%rdx", "%rbx", "%rsp", "%rbp", "%rsi", "%rdi",
    "%r8",  "%r9",  "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
#else
    "%eax", "%ecx", "%edx", "%ebx", "%esp", "%ebp", "%esi", "%edi",
#endif
};
#endif

static const int tcg_target_reg_alloc_order[] = {
#if TCG_TARGET_REG_BITS == 64
    TCG_REG_RBP,
    TCG_REG_RBX,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R9,
    TCG_REG_R8,
    TCG_REG_RCX,
    TCG_REG_RDX,
    TCG_REG_RSI,
    TCG_REG_RDI,
    TCG_REG_RAX,
#else
    TCG_REG_EBX,
    TCG_REG_ESI,
    TCG_REG_EDI,
    TCG_REG_EBP,
    TCG_REG_ECX,
    TCG_REG_EDX,
    TCG_REG_EAX,
#endif
};

static const int tcg_target_call_iarg_regs[] = {
#if TCG_TARGET_REG_BITS == 64
#if defined(_WIN64)
    TCG_REG_RCX,
    TCG_REG_RDX,
#else
    TCG_REG_RDI,
    TCG_REG_RSI,
    TCG_REG_RDX,
    TCG_REG_RCX,
#endif
    TCG_REG_R8,
    TCG_REG_R9,
#else
    /* 32 bit mode uses stack based calling convention (GCC default). */
#endif
};

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_EAX,
#if TCG_TARGET_REG_BITS == 32
    TCG_REG_EDX
#endif
};

/* Registers used with L constraint, which are the first argument
   registers on x86_64, and two random call clobbered registers on
   i386. */
#if TCG_TARGET_REG_BITS == 64
# define TCG_REG_L0 tcg_target_call_iarg_regs[0]
# define TCG_REG_L1 tcg_target_call_iarg_regs[1]
#else
# define TCG_REG_L0 TCG_REG_EAX
# define TCG_REG_L1 TCG_REG_EDX
#endif

/* For 32-bit, we are going to attempt to determine at runtime whether cmov
   is available.  However, the host compiler must supply <cpuid.h>, as we're
   not going to go so far as our own inline assembly.  */
#if TCG_TARGET_REG_BITS == 64
# define have_cmov 1
#elif defined(CONFIG_CPUID_H)
#include <cpuid.h>
static bool have_cmov;
#else
# define have_cmov 0
#endif

static uint8_t *tb_ret_addr;

static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    value += addend;
    switch(type) {
    case R_386_PC32:
        value -= (uintptr_t)code_ptr;
        if (value != (int32_t)value) {
            tcg_abort();
        }
        *(uint32_t *)code_ptr = value;
        break;
    case R_386_PC8:
        value -= (uintptr_t)code_ptr;
        if (value != (int8_t)value) {
            tcg_abort();
        }
        *(uint8_t *)code_ptr = value;
        break;
    default:
        tcg_abort();
    }
}

/* parse target specific constraints */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch(ct_str[0]) {
    case 'a':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_EAX);
        break;
    case 'b':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_EBX);
        break;
    case 'c':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_ECX);
        break;
    case 'd':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_EDX);
        break;
    case 'S':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_ESI);
        break;
    case 'D':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_EDI);
        break;
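    /* A register usable as an 8-bit operand: any register on x86_64,
       only %eax..%ebx (which have byte subregisters) on i386.  */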
    case 'q':
        ct->ct |= TCG_CT_REG;
        if (TCG_TARGET_REG_BITS == 64) {
            tcg_regset_set32(ct->u.regs, 0, 0xffff);
        } else {
            tcg_regset_set32(ct->u.regs, 0, 0xf);
        }
        break;
    case 'Q':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xf);
        break;
    case 'r':
        ct->ct |= TCG_CT_REG;
        if (TCG_TARGET_REG_BITS == 64) {
            tcg_regset_set32(ct->u.regs, 0, 0xffff);
        } else {
            tcg_regset_set32(ct->u.regs, 0, 0xff);
        }
        break;

        /* qemu_ld/st address constraint */
    case 'L':
        ct->ct |= TCG_CT_REG;
#if TCG_TARGET_REG_BITS == 64
            tcg_regset_set32(ct->u.regs, 0, 0xffff);
#else
            tcg_regset_set32(ct->u.regs, 0, 0xff);
#endif
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_L0);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_L1);
        break;

    case 'e':
        ct->ct |= TCG_CT_CONST_S32;
        break;
    case 'Z':
        ct->ct |= TCG_CT_CONST_U32;
        break;

    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}

/* test if a constant matches the constraint */
static inline int tcg_target_const_match(tcg_target_long val,
                                         const TCGArgConstraint *arg_ct)
{
    int ct = arg_ct->ct;
    if (ct & TCG_CT_CONST) {
        return 1;
    }
    if ((ct & TCG_CT_CONST_S32) && val == (int32_t)val) {
        return 1;
    }
    if ((ct & TCG_CT_CONST_U32) && val == (uint32_t)val) {
        return 1;
    }
    return 0;
}

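/* ModRM and SIB fields hold only the low three bits of a register number;
   on x86_64 the fourth bit is carried by the REX prefix (see tcg_out_opc).  */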
#if TCG_TARGET_REG_BITS == 64
# define LOWREGMASK(x)  ((x) & 7)
#else
# define LOWREGMASK(x)  (x)
#endif

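/* Opcode flags: the low byte is the opcode itself, the bits above select
   instruction prefixes that tcg_out_opc emits in front of it.  */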
#define P_EXT           0x100           /* 0x0f opcode prefix */
#define P_DATA16        0x200           /* 0x66 opcode prefix */
#if TCG_TARGET_REG_BITS == 64
# define P_ADDR32       0x400           /* 0x67 opcode prefix */
# define P_REXW         0x800           /* Set REX.W = 1 */
# define P_REXB_R       0x1000          /* REG field as byte register */
# define P_REXB_RM      0x2000          /* R/M field as byte register */
# define P_GS           0x4000          /* gs segment override */
#else
# define P_ADDR32       0
# define P_REXW         0
# define P_REXB_R       0
# define P_REXB_RM      0
# define P_GS           0
#endif

#define OPC_ARITH_EvIz  (0x81)
#define OPC_ARITH_EvIb  (0x83)
#define OPC_ARITH_GvEv  (0x03)          /* ... plus (ARITH_FOO << 3) */
#define OPC_ADD_GvEv    (OPC_ARITH_GvEv | (ARITH_ADD << 3))
#define OPC_BSWAP       (0xc8 | P_EXT)
#define OPC_CALL_Jz     (0xe8)
#define OPC_CMOVCC      (0x40 | P_EXT)  /* ... plus condition code */
#define OPC_CMP_GvEv    (OPC_ARITH_GvEv | (ARITH_CMP << 3))
#define OPC_DEC_r32     (0x48)
#define OPC_IMUL_GvEv   (0xaf | P_EXT)
#define OPC_IMUL_GvEvIb (0x6b)
#define OPC_IMUL_GvEvIz (0x69)
#define OPC_INC_r32     (0x40)
#define OPC_JCC_long    (0x80 | P_EXT)  /* ... plus condition code */
#define OPC_JCC_short   (0x70)          /* ... plus condition code */
#define OPC_JMP_long    (0xe9)
#define OPC_JMP_short   (0xeb)
#define OPC_LEA         (0x8d)
#define OPC_MOVB_EvGv   (0x88)          /* stores, more or less */
#define OPC_MOVL_EvGv   (0x89)          /* stores, more or less */
#define OPC_MOVL_GvEv   (0x8b)          /* loads, more or less */
#define OPC_MOVB_EvIz   (0xc6)
#define OPC_MOVL_EvIz   (0xc7)
#define OPC_MOVL_Iv     (0xb8)
#define OPC_MOVSBL      (0xbe | P_EXT)
#define OPC_MOVSWL      (0xbf | P_EXT)
#define OPC_MOVSLQ      (0x63 | P_REXW)
#define OPC_MOVZBL      (0xb6 | P_EXT)
#define OPC_MOVZWL      (0xb7 | P_EXT)
#define OPC_POP_r32     (0x58)
#define OPC_PUSH_r32    (0x50)
#define OPC_PUSH_Iv     (0x68)
#define OPC_PUSH_Ib     (0x6a)
#define OPC_RET         (0xc3)
#define OPC_SETCC       (0x90 | P_EXT | P_REXB_RM) /* ... plus cc */
#define OPC_SHIFT_1     (0xd1)
#define OPC_SHIFT_Ib    (0xc1)
#define OPC_SHIFT_cl    (0xd3)
#define OPC_TESTL       (0x85)
#define OPC_XCHG_ax_r32 (0x90)

#define OPC_GRP3_Ev     (0xf7)
#define OPC_GRP5        (0xff)

/* Group 1 opcode extensions for 0x80-0x83.
   These are also used as modifiers for OPC_ARITH.  */
#define ARITH_ADD 0
#define ARITH_OR  1
#define ARITH_ADC 2
#define ARITH_SBB 3
#define ARITH_AND 4
#define ARITH_SUB 5
#define ARITH_XOR 6
#define ARITH_CMP 7

/* Group 2 opcode extensions for 0xc0, 0xc1, 0xd0-0xd3.  */
#define SHIFT_ROL 0
#define SHIFT_ROR 1
#define SHIFT_SHL 4
#define SHIFT_SHR 5
#define SHIFT_SAR 7

/* Group 3 opcode extensions for 0xf6, 0xf7.  To be used with OPC_GRP3.  */
#define EXT3_NOT   2
#define EXT3_NEG   3
#define EXT3_MUL   4
#define EXT3_IMUL  5
#define EXT3_DIV   6
#define EXT3_IDIV  7

/* Group 5 opcode extensions for 0xff.  To be used with OPC_GRP5.  */
#define EXT5_INC_Ev     0
#define EXT5_DEC_Ev     1
#define EXT5_CALLN_Ev   2
#define EXT5_JMPN_Ev    4

/* Condition codes to be added to OPC_JCC_{long,short}.  */
#define JCC_JMP (-1)
#define JCC_JO  0x0
#define JCC_JNO 0x1
#define JCC_JB  0x2
#define JCC_JAE 0x3
#define JCC_JE  0x4
#define JCC_JNE 0x5
#define JCC_JBE 0x6
#define JCC_JA  0x7
#define JCC_JS  0x8
#define JCC_JNS 0x9
#define JCC_JP  0xa
#define JCC_JNP 0xb
#define JCC_JL  0xc
#define JCC_JGE 0xd
#define JCC_JLE 0xe
#define JCC_JG  0xf

static const uint8_t tcg_cond_to_jcc[] = {
    [TCG_COND_EQ] = JCC_JE,
    [TCG_COND_NE] = JCC_JNE,
    [TCG_COND_LT] = JCC_JL,
    [TCG_COND_GE] = JCC_JGE,
    [TCG_COND_LE] = JCC_JLE,
    [TCG_COND_GT] = JCC_JG,
    [TCG_COND_LTU] = JCC_JB,
    [TCG_COND_GEU] = JCC_JAE,
    [TCG_COND_LEU] = JCC_JBE,
    [TCG_COND_GTU] = JCC_JA,
};

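/* Emit the prefixes and opcode byte(s) for OPC: any 0x66/0x67/segment
   prefix requested by the P_* flags, a REX prefix derived from the REG (r),
   R/M (rm) and INDEX (x) register numbers, then 0x0f for two-byte opcodes.  */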
#if TCG_TARGET_REG_BITS == 64
static void tcg_out_opc(TCGContext *s, int opc, int r, int rm, int x)
{
    int rex;

    if (opc & P_GS) {
        tcg_out8(s, 0x65);
    }
    if (opc & P_DATA16) {
        /* We should never be asking for both 16 and 64-bit operation.  */
        assert((opc & P_REXW) == 0);
        tcg_out8(s, 0x66);
    }
    if (opc & P_ADDR32) {
        tcg_out8(s, 0x67);
    }

    rex = 0;
    rex |= (opc & P_REXW) >> 8;         /* REX.W */
    rex |= (r & 8) >> 1;                /* REX.R */
    rex |= (x & 8) >> 2;                /* REX.X */
    rex |= (rm & 8) >> 3;               /* REX.B */

    /* P_REXB_{R,RM} indicates that the given register is the low byte.
       For %[abcd]l we need no REX prefix, but for %{si,di,bp,sp}l we do,
       as otherwise the encoding indicates %[abcd]h.  Note that the values
       that are ORed in merely indicate that the REX byte must be present;
       those bits get discarded in output.  */
    rex |= opc & (r >= 4 ? P_REXB_R : 0);
    rex |= opc & (rm >= 4 ? P_REXB_RM : 0);

    if (rex) {
        tcg_out8(s, (uint8_t)(rex | 0x40));
    }

    if (opc & P_EXT) {
        tcg_out8(s, 0x0f);
    }
    tcg_out8(s, opc);
}
#else
static void tcg_out_opc(TCGContext *s, int opc)
{
    if (opc & P_DATA16) {
        tcg_out8(s, 0x66);
    }
    if (opc & P_EXT) {
        tcg_out8(s, 0x0f);
    }
    tcg_out8(s, opc);
}
/* Discard the register arguments to tcg_out_opc early, so as not to penalize
   the 32-bit compilation paths.  This method works with all versions of gcc,
   whereas relying on optimization may not be able to exclude them.  */
#define tcg_out_opc(s, opc, r, rm, x)  (tcg_out_opc)(s, opc)
#endif

static void tcg_out_modrm(TCGContext *s, int opc, int r, int rm)
{
    tcg_out_opc(s, opc, r, rm, 0);
    tcg_out8(s, 0xc0 | (LOWREGMASK(r) << 3) | LOWREGMASK(rm));
}

/* Output an opcode with a full "rm + (index<<shift) + offset" address mode.
   We handle either RM and INDEX missing with a negative value.  In 64-bit
   mode for absolute addresses, ~RM is the size of the immediate operand
   that will follow the instruction.  */

static void tcg_out_modrm_sib_offset(TCGContext *s, int opc, int r, int rm,
                                     int index, int shift,
                                     tcg_target_long offset)
{
    int mod, len;

    if (index < 0 && rm < 0) {
        if (TCG_TARGET_REG_BITS == 64) {
            /* Try for a rip-relative addressing mode.  This has replaced
               the 32-bit-mode absolute addressing encoding.  */
            tcg_target_long pc = (tcg_target_long)s->code_ptr + 5 + ~rm;
            tcg_target_long disp = offset - pc;
            if (disp == (int32_t)disp) {
                tcg_out_opc(s, opc, r, 0, 0);
                tcg_out8(s, (LOWREGMASK(r) << 3) | 5);
                tcg_out32(s, disp);
                return;
            }

            /* Try for an absolute address encoding.  This requires the
               use of the MODRM+SIB encoding and is therefore larger than
               rip-relative addressing.  */
            if (offset == (int32_t)offset) {
                tcg_out_opc(s, opc, r, 0, 0);
                tcg_out8(s, (LOWREGMASK(r) << 3) | 4);
                tcg_out8(s, (4 << 3) | 5);
                tcg_out32(s, offset);
                return;
            }

            /* ??? The memory isn't directly addressable.  */
            tcg_abort();
        } else {
            /* Absolute address.  */
            tcg_out_opc(s, opc, r, 0, 0);
            tcg_out8(s, (r << 3) | 5);
            tcg_out32(s, offset);
            return;
        }
    }

    /* Find the length of the immediate addend.  Note that the encoding
       that would be used for (%ebp) indicates absolute addressing.  */
    if (rm < 0) {
        mod = 0, len = 4, rm = 5;
    } else if (offset == 0 && LOWREGMASK(rm) != TCG_REG_EBP) {
        mod = 0, len = 0;
    } else if (offset == (int8_t)offset) {
        mod = 0x40, len = 1;
    } else {
        mod = 0x80, len = 4;
    }

    /* Use a single byte MODRM format if possible.  Note that the encoding
       that would be used for %esp is the escape to the two byte form.  */
    if (index < 0 && LOWREGMASK(rm) != TCG_REG_ESP) {
        /* Single byte MODRM format.  */
        tcg_out_opc(s, opc, r, rm, 0);
        tcg_out8(s, mod | (LOWREGMASK(r) << 3) | LOWREGMASK(rm));
    } else {
        /* Two byte MODRM+SIB format.  */

        /* Note that the encoding that would place %esp into the index
           field indicates no index register.  In 64-bit mode, the REX.X
           bit counts, so %r12 can be used as the index.  */
        if (index < 0) {
            index = 4;
        } else {
            assert(index != TCG_REG_ESP);
        }

        tcg_out_opc(s, opc, r, rm, index);
        tcg_out8(s, mod | (LOWREGMASK(r) << 3) | 4);
        tcg_out8(s, (shift << 6) | (LOWREGMASK(index) << 3) | LOWREGMASK(rm));
    }

    if (len == 1) {
        tcg_out8(s, offset);
    } else if (len == 4) {
        tcg_out32(s, offset);
    }
}

/* A simplification of the above with no index or shift.  */
static inline void tcg_out_modrm_offset(TCGContext *s, int opc, int r,
                                        int rm, tcg_target_long offset)
{
    tcg_out_modrm_sib_offset(s, opc, r, rm, -1, 0, offset);
}

/* Generate dest op= src.  Uses the same ARITH_* codes as tgen_arithi.  */
static inline void tgen_arithr(TCGContext *s, int subop, int dest, int src)
{
    /* Propagate an opcode prefix, such as P_REXW.  */
    int ext = subop & ~0x7;
    subop &= 0x7;

    tcg_out_modrm(s, OPC_ARITH_GvEv + (subop << 3) + ext, dest, src);
}

static inline void tcg_out_mov(TCGContext *s, TCGType type,
                               TCGReg ret, TCGReg arg)
{
    if (arg != ret) {
        int opc = OPC_MOVL_GvEv + (type == TCG_TYPE_I64 ? P_REXW : 0);
        tcg_out_modrm(s, opc, ret, arg);
    }
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    if (arg == 0) {
        tgen_arithr(s, ARITH_XOR, ret, ret);
        return;
    } else if (arg == (uint32_t)arg || type == TCG_TYPE_I32) {
        tcg_out_opc(s, OPC_MOVL_Iv + LOWREGMASK(ret), 0, ret, 0);
        tcg_out32(s, arg);
    } else if (arg == (int32_t)arg) {
        tcg_out_modrm(s, OPC_MOVL_EvIz + P_REXW, 0, ret);
        tcg_out32(s, arg);
    } else {
        tcg_out_opc(s, OPC_MOVL_Iv + P_REXW + LOWREGMASK(ret), 0, ret, 0);
        tcg_out32(s, arg);
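        /* The shift is split in two so that a 32-bit tcg_target_long is
           never shifted by its full width; this branch is unreachable in
           that configuration but is still compiled.  */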
        tcg_out32(s, arg >> 31 >> 1);
    }
}

static inline void tcg_out_pushi(TCGContext *s, tcg_target_long val)
{
    if (val == (int8_t)val) {
        tcg_out_opc(s, OPC_PUSH_Ib, 0, 0, 0);
        tcg_out8(s, val);
    } else if (val == (int32_t)val) {
        tcg_out_opc(s, OPC_PUSH_Iv, 0, 0, 0);
        tcg_out32(s, val);
    } else {
        tcg_abort();
    }
}

static inline void tcg_out_push(TCGContext *s, int reg)
{
    tcg_out_opc(s, OPC_PUSH_r32 + LOWREGMASK(reg), 0, reg, 0);
}

static inline void tcg_out_pop(TCGContext *s, int reg)
{
    tcg_out_opc(s, OPC_POP_r32 + LOWREGMASK(reg), 0, reg, 0);
}

static inline void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret,
                              TCGReg arg1, tcg_target_long arg2)
{
    int opc = OPC_MOVL_GvEv + (type == TCG_TYPE_I64 ? P_REXW : 0);
    tcg_out_modrm_offset(s, opc, ret, arg1, arg2);
}

static inline void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg,
                              TCGReg arg1, tcg_target_long arg2)
{
    int opc = OPC_MOVL_EvGv + (type == TCG_TYPE_I64 ? P_REXW : 0);
    tcg_out_modrm_offset(s, opc, arg, arg1, arg2);
}

static void tcg_out_shifti(TCGContext *s, int subopc, int reg, int count)
{
    /* Propagate an opcode prefix, such as P_DATA16.  */
    int ext = subopc & ~0x7;
    subopc &= 0x7;

    if (count == 1) {
        tcg_out_modrm(s, OPC_SHIFT_1 + ext, subopc, reg);
    } else {
        tcg_out_modrm(s, OPC_SHIFT_Ib + ext, subopc, reg);
        tcg_out8(s, count);
    }
}

static inline void tcg_out_bswap32(TCGContext *s, int reg)
{
    tcg_out_opc(s, OPC_BSWAP + LOWREGMASK(reg), 0, reg, 0);
}

static inline void tcg_out_rolw_8(TCGContext *s, int reg)
{
    tcg_out_shifti(s, SHIFT_ROL + P_DATA16, reg, 8);
}

static inline void tcg_out_ext8u(TCGContext *s, int dest, int src)
{
    /* movzbl */
    assert(src < 4 || TCG_TARGET_REG_BITS == 64);
    tcg_out_modrm(s, OPC_MOVZBL + P_REXB_RM, dest, src);
}

static void tcg_out_ext8s(TCGContext *s, int dest, int src, int rexw)
{
    /* movsbl */
    assert(src < 4 || TCG_TARGET_REG_BITS == 64);
    tcg_out_modrm(s, OPC_MOVSBL + P_REXB_RM + rexw, dest, src);
}

static inline void tcg_out_ext16u(TCGContext *s, int dest, int src)
{
    /* movzwl */
    tcg_out_modrm(s, OPC_MOVZWL, dest, src);
}

static inline void tcg_out_ext16s(TCGContext *s, int dest, int src, int rexw)
{
    /* movsw[lq] */
    tcg_out_modrm(s, OPC_MOVSWL + rexw, dest, src);
}

static inline void tcg_out_ext32u(TCGContext *s, int dest, int src)
{
    /* 32-bit mov zero extends.  */
    tcg_out_modrm(s, OPC_MOVL_GvEv, dest, src);
}

static inline void tcg_out_ext32s(TCGContext *s, int dest, int src)
{
    tcg_out_modrm(s, OPC_MOVSLQ, dest, src);
}

static inline void tcg_out_bswap64(TCGContext *s, int reg)
{
    tcg_out_opc(s, OPC_BSWAP + P_REXW + LOWREGMASK(reg), 0, reg, 0);
}

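/* Emit "r0 = r0 <c> val" for one of the ARITH_* operations.  C may have
   P_REXW or'ed in; CF != 0 means the carry flag is live, so the INC/DEC
   shortcut (which does not update CF) must not be used.  */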
static void tgen_arithi(TCGContext *s, int c, int r0,
                        tcg_target_long val, int cf)
{
    int rexw = 0;

    if (TCG_TARGET_REG_BITS == 64) {
        rexw = c & -8;
        c &= 7;
    }

    /* ??? While INC is 2 bytes shorter than ADDL $1, they also induce
       partial flags update stalls on Pentium4 and are not recommended
       by current Intel optimization manuals.  */
    if (!cf && (c == ARITH_ADD || c == ARITH_SUB) && (val == 1 || val == -1)) {
        int is_inc = (c == ARITH_ADD) ^ (val < 0);
        if (TCG_TARGET_REG_BITS == 64) {
            /* The single-byte increment encodings are re-tasked as the
               REX prefixes.  Use the MODRM encoding.  */
            tcg_out_modrm(s, OPC_GRP5 + rexw,
                          (is_inc ? EXT5_INC_Ev : EXT5_DEC_Ev), r0);
        } else {
            tcg_out8(s, (is_inc ? OPC_INC_r32 : OPC_DEC_r32) + r0);
        }
        return;
    }

    if (c == ARITH_AND) {
        if (TCG_TARGET_REG_BITS == 64) {
            if (val == 0xffffffffu) {
                tcg_out_ext32u(s, r0, r0);
                return;
            }
            if (val == (uint32_t)val) {
                /* AND with no high bits set can use a 32-bit operation.  */
                rexw = 0;
            }
        }
        if (val == 0xffu && (r0 < 4 || TCG_TARGET_REG_BITS == 64)) {
            tcg_out_ext8u(s, r0, r0);
            return;
        }
        if (val == 0xffffu) {
            tcg_out_ext16u(s, r0, r0);
            return;
        }
    }

    if (val == (int8_t)val) {
        tcg_out_modrm(s, OPC_ARITH_EvIb + rexw, c, r0);
        tcg_out8(s, val);
        return;
    }
    if (rexw == 0 || val == (int32_t)val) {
        tcg_out_modrm(s, OPC_ARITH_EvIz + rexw, c, r0);
        tcg_out32(s, val);
        return;
    }

    tcg_abort();
}

static void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
{
    if (val != 0) {
        tgen_arithi(s, ARITH_ADD + P_REXW, reg, val, 0);
    }
}

/* Use SMALL != 0 to force a short forward branch.  */
static void tcg_out_jxx(TCGContext *s, int opc, int label_index, int small)
{
    int32_t val, val1;
    TCGLabel *l = &s->labels[label_index];

    if (l->has_value) {
        val = l->u.value - (tcg_target_long)s->code_ptr;
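        /* Short jumps are 2 bytes long; the 8-bit displacement is relative
           to the end of that instruction.  */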
        val1 = val - 2;
        if ((int8_t)val1 == val1) {
            if (opc == -1) {
                tcg_out8(s, OPC_JMP_short);
            } else {
                tcg_out8(s, OPC_JCC_short + opc);
            }
            tcg_out8(s, val1);
        } else {
            if (small) {
                tcg_abort();
            }
            if (opc == -1) {
                tcg_out8(s, OPC_JMP_long);
                tcg_out32(s, val - 5);
            } else {
                tcg_out_opc(s, OPC_JCC_long + opc, 0, 0, 0);
                tcg_out32(s, val - 6);
            }
        }
    } else if (small) {
        if (opc == -1) {
            tcg_out8(s, OPC_JMP_short);
        } else {
            tcg_out8(s, OPC_JCC_short + opc);
        }
        tcg_out_reloc(s, s->code_ptr, R_386_PC8, label_index, -1);
        s->code_ptr += 1;
    } else {
        if (opc == -1) {
            tcg_out8(s, OPC_JMP_long);
        } else {
            tcg_out_opc(s, OPC_JCC_long + opc, 0, 0, 0);
        }
        tcg_out_reloc(s, s->code_ptr, R_386_PC32, label_index, -4);
        s->code_ptr += 4;
    }
}

static void tcg_out_cmp(TCGContext *s, TCGArg arg1, TCGArg arg2,
                        int const_arg2, int rexw)
{
    if (const_arg2) {
        if (arg2 == 0) {
            /* test r, r */
            tcg_out_modrm(s, OPC_TESTL + rexw, arg1, arg1);
        } else {
            tgen_arithi(s, ARITH_CMP + rexw, arg1, arg2, 0);
        }
    } else {
        tgen_arithr(s, ARITH_CMP + rexw, arg1, arg2);
    }
}

static void tcg_out_brcond32(TCGContext *s, TCGCond cond,
                             TCGArg arg1, TCGArg arg2, int const_arg2,
                             int label_index, int small)
{
    tcg_out_cmp(s, arg1, arg2, const_arg2, 0);
    tcg_out_jxx(s, tcg_cond_to_jcc[cond], label_index, small);
}

#if TCG_TARGET_REG_BITS == 64
static void tcg_out_brcond64(TCGContext *s, TCGCond cond,
                             TCGArg arg1, TCGArg arg2, int const_arg2,
                             int label_index, int small)
{
    tcg_out_cmp(s, arg1, arg2, const_arg2, P_REXW);
    tcg_out_jxx(s, tcg_cond_to_jcc[cond], label_index, small);
}
#else
/* XXX: we implement it at the target level to avoid having to
   handle cross basic blocks temporaries */
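/* The 64-bit comparison is split over the two 32-bit halves: EQ/NE test
   both halves directly, while the ordering conditions are decided by the
   high words and fall back to an unsigned comparison of the low words
   when the high words are equal.  */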
static void tcg_out_brcond2(TCGContext *s, const TCGArg *args,
                            const int *const_args, int small)
{
    int label_next;
    label_next = gen_new_label();
    switch(args[4]) {
    case TCG_COND_EQ:
        tcg_out_brcond32(s, TCG_COND_NE, args[0], args[2], const_args[2],
                         label_next, 1);
        tcg_out_brcond32(s, TCG_COND_EQ, args[1], args[3], const_args[3],
                         args[5], small);
        break;
    case TCG_COND_NE:
        tcg_out_brcond32(s, TCG_COND_NE, args[0], args[2], const_args[2],
                         args[5], small);
        tcg_out_brcond32(s, TCG_COND_NE, args[1], args[3], const_args[3],
                         args[5], small);
        break;
    case TCG_COND_LT:
        tcg_out_brcond32(s, TCG_COND_LT, args[1], args[3], const_args[3],
                         args[5], small);
        tcg_out_jxx(s, JCC_JNE, label_next, 1);
        tcg_out_brcond32(s, TCG_COND_LTU, args[0], args[2], const_args[2],
                         args[5], small);
        break;
    case TCG_COND_LE:
        tcg_out_brcond32(s, TCG_COND_LT, args[1], args[3], const_args[3],
                         args[5], small);
        tcg_out_jxx(s, JCC_JNE, label_next, 1);
        tcg_out_brcond32(s, TCG_COND_LEU, args[0], args[2], const_args[2],
                         args[5], small);
        break;
    case TCG_COND_GT:
        tcg_out_brcond32(s, TCG_COND_GT, args[1], args[3], const_args[3],
                         args[5], small);
        tcg_out_jxx(s, JCC_JNE, label_next, 1);
        tcg_out_brcond32(s, TCG_COND_GTU, args[0], args[2], const_args[2],
                         args[5], small);
        break;
    case TCG_COND_GE:
        tcg_out_brcond32(s, TCG_COND_GT, args[1], args[3], const_args[3],
                         args[5], small);
        tcg_out_jxx(s, JCC_JNE, label_next, 1);
        tcg_out_brcond32(s, TCG_COND_GEU, args[0], args[2], const_args[2],
                         args[5], small);
        break;
    case TCG_COND_LTU:
        tcg_out_brcond32(s, TCG_COND_LTU, args[1], args[3], const_args[3],
                         args[5], small);
        tcg_out_jxx(s, JCC_JNE, label_next, 1);
        tcg_out_brcond32(s, TCG_COND_LTU, args[0], args[2], const_args[2],
                         args[5], small);
        break;
    case TCG_COND_LEU:
        tcg_out_brcond32(s, TCG_COND_LTU, args[1], args[3], const_args[3],
                         args[5], small);
        tcg_out_jxx(s, JCC_JNE, label_next, 1);
        tcg_out_brcond32(s, TCG_COND_LEU, args[0], args[2], const_args[2],
                         args[5], small);
        break;
    case TCG_COND_GTU:
        tcg_out_brcond32(s, TCG_COND_GTU, args[1], args[3], const_args[3],
                         args[5], small);
        tcg_out_jxx(s, JCC_JNE, label_next, 1);
        tcg_out_brcond32(s, TCG_COND_GTU, args[0], args[2], const_args[2],
                         args[5], small);
        break;
    case TCG_COND_GEU:
        tcg_out_brcond32(s, TCG_COND_GTU, args[1], args[3], const_args[3],
                         args[5], small);
        tcg_out_jxx(s, JCC_JNE, label_next, 1);
        tcg_out_brcond32(s, TCG_COND_GEU, args[0], args[2], const_args[2],
                         args[5], small);
        break;
    default:
        tcg_abort();
    }
    tcg_out_label(s, label_next, s->code_ptr);
}
#endif

static void tcg_out_setcond32(TCGContext *s, TCGCond cond, TCGArg dest,
                              TCGArg arg1, TCGArg arg2, int const_arg2)
{
    tcg_out_cmp(s, arg1, arg2, const_arg2, 0);
    tcg_out_modrm(s, OPC_SETCC | tcg_cond_to_jcc[cond], 0, dest);
    tcg_out_ext8u(s, dest, dest);
}

#if TCG_TARGET_REG_BITS == 64
static void tcg_out_setcond64(TCGContext *s, TCGCond cond, TCGArg dest,
                              TCGArg arg1, TCGArg arg2, int const_arg2)
{
    tcg_out_cmp(s, arg1, arg2, const_arg2, P_REXW);
    tcg_out_modrm(s, OPC_SETCC | tcg_cond_to_jcc[cond], 0, dest);
    tcg_out_ext8u(s, dest, dest);
}
#else
static void tcg_out_setcond2(TCGContext *s, const TCGArg *args,
                             const int *const_args)
{
    TCGArg new_args[6];
    int label_true, label_over;

    memcpy(new_args, args+1, 5*sizeof(TCGArg));

    if (args[0] == args[1] || args[0] == args[2]
        || (!const_args[3] && args[0] == args[3])
        || (!const_args[4] && args[0] == args[4])) {
        /* When the destination overlaps with one of the argument
           registers, don't do anything tricky.  */
        label_true = gen_new_label();
        label_over = gen_new_label();

        new_args[5] = label_true;
        tcg_out_brcond2(s, new_args, const_args+1, 1);

        tcg_out_movi(s, TCG_TYPE_I32, args[0], 0);
        tcg_out_jxx(s, JCC_JMP, label_over, 1);
        tcg_out_label(s, label_true, s->code_ptr);

        tcg_out_movi(s, TCG_TYPE_I32, args[0], 1);
        tcg_out_label(s, label_over, s->code_ptr);
    } else {
        /* When the destination does not overlap one of the arguments,
           clear the destination first, jump if cond false, and emit an
           increment in the true case.  This results in smaller code.  */

        tcg_out_movi(s, TCG_TYPE_I32, args[0], 0);

        label_over = gen_new_label();
        new_args[4] = tcg_invert_cond(new_args[4]);
        new_args[5] = label_over;
        tcg_out_brcond2(s, new_args, const_args+1, 1);

        tgen_arithi(s, ARITH_ADD, args[0], 1, 0);
        tcg_out_label(s, label_over, s->code_ptr);
    }
}
#endif

static void tcg_out_movcond32(TCGContext *s, TCGCond cond, TCGArg dest,
                              TCGArg c1, TCGArg c2, int const_c2,
                              TCGArg v1)
{
    tcg_out_cmp(s, c1, c2, const_c2, 0);
    if (have_cmov) {
        tcg_out_modrm(s, OPC_CMOVCC | tcg_cond_to_jcc[cond], dest, v1);
    } else {
        int over = gen_new_label();
962 76a347e1 Richard Henderson
        tcg_out_jxx(s, tcg_cond_to_jcc[tcg_invert_cond(cond)], over, 1);
963 76a347e1 Richard Henderson
        tcg_out_mov(s, TCG_TYPE_I32, dest, v1);
964 76a347e1 Richard Henderson
        tcg_out_label(s, over, s->code_ptr);
965 76a347e1 Richard Henderson
    }
966 d0a16297 Richard Henderson
}
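/* Illustrative only: with CMOV available, tcg_out_movcond32 emits
       cmp    c2, c1
       cmovcc v1, dest
   and without it falls back to a conditional branch around a plain move:
       cmp    c2, c1
       jcc(!cond) over
       mov    v1, dest
   over:
*/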
967 d0a16297 Richard Henderson
968 d0a16297 Richard Henderson
#if TCG_TARGET_REG_BITS == 64
969 d0a16297 Richard Henderson
static void tcg_out_movcond64(TCGContext *s, TCGCond cond, TCGArg dest,
970 d0a16297 Richard Henderson
                              TCGArg c1, TCGArg c2, int const_c2,
971 d0a16297 Richard Henderson
                              TCGArg v1)
972 d0a16297 Richard Henderson
{
973 d0a16297 Richard Henderson
    tcg_out_cmp(s, c1, c2, const_c2, P_REXW);
974 d0a16297 Richard Henderson
    tcg_out_modrm(s, OPC_CMOVCC | tcg_cond_to_jcc[cond] | P_REXW, dest, v1);
975 d0a16297 Richard Henderson
}
976 d0a16297 Richard Henderson
#endif
977 d0a16297 Richard Henderson
978 5d8a4f8f Richard Henderson
static void tcg_out_branch(TCGContext *s, int call, tcg_target_long dest)
979 5d8a4f8f Richard Henderson
{
980 5d8a4f8f Richard Henderson
    tcg_target_long disp = dest - (tcg_target_long)s->code_ptr - 5;
981 5d8a4f8f Richard Henderson
982 5d8a4f8f Richard Henderson
    if (disp == (int32_t)disp) {
983 5d8a4f8f Richard Henderson
        tcg_out_opc(s, call ? OPC_CALL_Jz : OPC_JMP_long, 0, 0, 0);
984 5d8a4f8f Richard Henderson
        tcg_out32(s, disp);
985 5d8a4f8f Richard Henderson
    } else {
986 5d8a4f8f Richard Henderson
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R10, dest);
987 5d8a4f8f Richard Henderson
        tcg_out_modrm(s, OPC_GRP5,
988 5d8a4f8f Richard Henderson
                      call ? EXT5_CALLN_Ev : EXT5_JMPN_Ev, TCG_REG_R10);
989 5d8a4f8f Richard Henderson
    }
990 5d8a4f8f Richard Henderson
}
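/* Illustrative only: when the destination lies within a signed 32-bit
   displacement of the current code pointer, tcg_out_branch emits a 5-byte
   direct "call/jmp rel32"; otherwise (possible only on 64-bit hosts) it
   materializes the address in %r10 and emits an indirect "call/jmp *%r10".  */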
991 5d8a4f8f Richard Henderson
992 5d8a4f8f Richard Henderson
static inline void tcg_out_calli(TCGContext *s, tcg_target_long dest)
993 5d8a4f8f Richard Henderson
{
994 5d8a4f8f Richard Henderson
    tcg_out_branch(s, 1, dest);
995 5d8a4f8f Richard Henderson
}
996 1d2699ae Richard Henderson
997 5d8a4f8f Richard Henderson
static void tcg_out_jmp(TCGContext *s, tcg_target_long dest)
998 aadb21a4 Richard Henderson
{
999 5d8a4f8f Richard Henderson
    tcg_out_branch(s, 0, dest);
1000 aadb21a4 Richard Henderson
}
1001 aadb21a4 Richard Henderson
1002 c896fe29 bellard
#if defined(CONFIG_SOFTMMU)
1003 79383c9c blueswir1
1004 022c62cb Paolo Bonzini
#include "exec/softmmu_defs.h"
1005 c896fe29 bellard
1006 e141ab52 Blue Swirl
/* helper signature: helper_ld_mmu(CPUState *env, target_ulong addr,
1007 e141ab52 Blue Swirl
   int mmu_idx) */
1008 e141ab52 Blue Swirl
static const void *qemu_ld_helpers[4] = {
1009 e141ab52 Blue Swirl
    helper_ldb_mmu,
1010 e141ab52 Blue Swirl
    helper_ldw_mmu,
1011 e141ab52 Blue Swirl
    helper_ldl_mmu,
1012 e141ab52 Blue Swirl
    helper_ldq_mmu,
1013 e141ab52 Blue Swirl
};
1014 e141ab52 Blue Swirl
1015 e141ab52 Blue Swirl
/* helper signature: helper_st_mmu(CPUState *env, target_ulong addr,
1016 e141ab52 Blue Swirl
   uintxx_t val, int mmu_idx) */
1017 e141ab52 Blue Swirl
static const void *qemu_st_helpers[4] = {
1018 e141ab52 Blue Swirl
    helper_stb_mmu,
1019 e141ab52 Blue Swirl
    helper_stw_mmu,
1020 e141ab52 Blue Swirl
    helper_stl_mmu,
1021 e141ab52 Blue Swirl
    helper_stq_mmu,
1022 e141ab52 Blue Swirl
};
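/* For illustration only, following the signature comments above (the actual
   prototypes come from exec/softmmu_defs.h):
       ret_t helper_ldX_mmu(CPUArchState *env, target_ulong addr, int mmu_idx);
       void  helper_stX_mmu(CPUArchState *env, target_ulong addr,
                            uintxx_t val, int mmu_idx);
   Both arrays are indexed by s_bits, i.e. log2 of the access size.  */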
1023 8516a044 Richard Henderson
1024 b76f0d8c Yeongkyoon Lee
static void add_qemu_ldst_label(TCGContext *s,
1025 b76f0d8c Yeongkyoon Lee
                                int is_ld,
1026 b76f0d8c Yeongkyoon Lee
                                int opc,
1027 b76f0d8c Yeongkyoon Lee
                                int data_reg,
1028 b76f0d8c Yeongkyoon Lee
                                int data_reg2,
1029 b76f0d8c Yeongkyoon Lee
                                int addrlo_reg,
1030 b76f0d8c Yeongkyoon Lee
                                int addrhi_reg,
1031 b76f0d8c Yeongkyoon Lee
                                int mem_index,
1032 b76f0d8c Yeongkyoon Lee
                                uint8_t *raddr,
1033 b76f0d8c Yeongkyoon Lee
                                uint8_t **label_ptr);
1034 b76f0d8c Yeongkyoon Lee
1035 8516a044 Richard Henderson
/* Perform the TLB load and compare.
1036 8516a044 Richard Henderson

1037 8516a044 Richard Henderson
   Inputs:
1038 8516a044 Richard Henderson
   ADDRLO_IDX contains the index into ARGS of the low part of the
1039 8516a044 Richard Henderson
   address; the high part of the address is at ADDRLO_IDX+1.
1040 8516a044 Richard Henderson

1041 8516a044 Richard Henderson
   MEM_INDEX and S_BITS are the memory context and log2 size of the load.
1042 8516a044 Richard Henderson

1043 8516a044 Richard Henderson
   WHICH is the offset into the CPUTLBEntry structure of the slot to read.
1044 8516a044 Richard Henderson
   This should be the offsetof() of either addr_read or addr_write.
1045 8516a044 Richard Henderson

1046 8516a044 Richard Henderson
   Outputs:
1047 8516a044 Richard Henderson
   LABEL_PTRS is filled with 1 (32-bit addresses) or 2 (64-bit addresses)
1048 8516a044 Richard Henderson
   positions of the displacements of forward jumps to the TLB miss case.
1049 8516a044 Richard Henderson

1050 166792f7 Aurelien Jarno
   Second argument register is loaded with the low part of the address.
1051 5d8a4f8f Richard Henderson
   In the TLB hit case, it has been adjusted as indicated by the TLB
1052 5d8a4f8f Richard Henderson
   and so is a host address.  In the TLB miss case, it continues to
1053 5d8a4f8f Richard Henderson
   hold a guest address.
1054 8516a044 Richard Henderson

1055 166792f7 Aurelien Jarno
   First argument register is clobbered.  */
1056 8516a044 Richard Henderson
1057 c28b14c6 Aurelien Jarno
static inline void tcg_out_tlb_load(TCGContext *s, int addrlo_idx,
1058 c28b14c6 Aurelien Jarno
                                    int mem_index, int s_bits,
1059 4309a79b Aurelien Jarno
                                    const TCGArg *args,
1060 c28b14c6 Aurelien Jarno
                                    uint8_t **label_ptr, int which)
1061 8516a044 Richard Henderson
{
1062 8516a044 Richard Henderson
    const int addrlo = args[addrlo_idx];
1063 b18212c6 Stefan Weil
    const int r0 = TCG_REG_L0;
1064 b18212c6 Stefan Weil
    const int r1 = TCG_REG_L1;
1065 5d8a4f8f Richard Henderson
    TCGType type = TCG_TYPE_I32;
1066 5d8a4f8f Richard Henderson
    int rexw = 0;
1067 5d8a4f8f Richard Henderson
1068 5d8a4f8f Richard Henderson
    if (TCG_TARGET_REG_BITS == 64 && TARGET_LONG_BITS == 64) {
1069 5d8a4f8f Richard Henderson
        type = TCG_TYPE_I64;
1070 5d8a4f8f Richard Henderson
        rexw = P_REXW;
1071 5d8a4f8f Richard Henderson
    }
1072 8516a044 Richard Henderson
1073 5d8a4f8f Richard Henderson
    tcg_out_mov(s, type, r0, addrlo);
1074 166792f7 Aurelien Jarno
    tcg_out_mov(s, type, r1, addrlo);
1075 8516a044 Richard Henderson
1076 166792f7 Aurelien Jarno
    tcg_out_shifti(s, SHIFT_SHR + rexw, r0,
1077 5d8a4f8f Richard Henderson
                   TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
1078 8516a044 Richard Henderson
1079 5d8a4f8f Richard Henderson
    tgen_arithi(s, ARITH_AND + rexw, r1,
1080 166792f7 Aurelien Jarno
                TARGET_PAGE_MASK | ((1 << s_bits) - 1), 0);
1081 166792f7 Aurelien Jarno
    tgen_arithi(s, ARITH_AND + rexw, r0,
1082 5d8a4f8f Richard Henderson
                (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS, 0);
1083 8516a044 Richard Henderson
1084 166792f7 Aurelien Jarno
    tcg_out_modrm_sib_offset(s, OPC_LEA + P_REXW, r0, TCG_AREG0, r0, 0,
1085 9349b4f9 Andreas Fรคrber
                             offsetof(CPUArchState, tlb_table[mem_index][0])
1086 8516a044 Richard Henderson
                             + which);
1087 8516a044 Richard Henderson
1088 166792f7 Aurelien Jarno
    /* cmp 0(r0), r1 */
1089 166792f7 Aurelien Jarno
    tcg_out_modrm_offset(s, OPC_CMP_GvEv + rexw, r1, r0, 0);
1090 8516a044 Richard Henderson
1091 166792f7 Aurelien Jarno
    tcg_out_mov(s, type, r1, addrlo);
1092 8516a044 Richard Henderson
1093 b76f0d8c Yeongkyoon Lee
    /* jne slow_path */
1094 b76f0d8c Yeongkyoon Lee
    tcg_out_opc(s, OPC_JCC_long + JCC_JNE, 0, 0, 0);
1095 8516a044 Richard Henderson
    label_ptr[0] = s->code_ptr;
1096 b76f0d8c Yeongkyoon Lee
    s->code_ptr += 4;
1097 8516a044 Richard Henderson
1098 5d8a4f8f Richard Henderson
    if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
1099 166792f7 Aurelien Jarno
        /* cmp 4(r0), addrhi */
1100 166792f7 Aurelien Jarno
        tcg_out_modrm_offset(s, OPC_CMP_GvEv, args[addrlo_idx+1], r0, 4);
1101 8516a044 Richard Henderson
1102 b76f0d8c Yeongkyoon Lee
        /* jne slow_path */
1103 b76f0d8c Yeongkyoon Lee
        tcg_out_opc(s, OPC_JCC_long + JCC_JNE, 0, 0, 0);
1104 8516a044 Richard Henderson
        label_ptr[1] = s->code_ptr;
1105 b76f0d8c Yeongkyoon Lee
        s->code_ptr += 4;
1106 8516a044 Richard Henderson
    }
1107 8516a044 Richard Henderson
1108 8516a044 Richard Henderson
    /* TLB Hit.  */
1109 8516a044 Richard Henderson
1110 166792f7 Aurelien Jarno
    /* add addend(r0), r1 */
1111 166792f7 Aurelien Jarno
    tcg_out_modrm_offset(s, OPC_ADD_GvEv + P_REXW, r1, r0,
1112 8516a044 Richard Henderson
                         offsetof(CPUTLBEntry, addend) - which);
1113 8516a044 Richard Henderson
}
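/* Illustrative sketch (not part of the original source) of the lookup emitted
   above, assuming a 64-bit host and a 64-bit guest:
       mov   addrlo, r0
       mov   addrlo, r1
       shr   $(TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS), r0
       and   $(TARGET_PAGE_MASK | ((1 << s_bits) - 1)), r1
       and   $((CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS), r0
       lea   tlb_table[mem_index][0].WHICH(env, r0), r0
       cmp   (r0), r1
       mov   addrlo, r1
       jne   slow_path              ; 32-bit displacement patched later
       add   addend_offset(r0), r1  ; r1 now holds the host address
*/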
1114 44b37ace Richard Henderson
#elif defined(__x86_64__) && defined(__linux__)
1115 44b37ace Richard Henderson
# include <asm/prctl.h>
1116 44b37ace Richard Henderson
# include <sys/prctl.h>
1117 44b37ace Richard Henderson
1118 44b37ace Richard Henderson
int arch_prctl(int code, unsigned long addr);
1119 44b37ace Richard Henderson
1120 44b37ace Richard Henderson
static int guest_base_flags;
1121 44b37ace Richard Henderson
static inline void setup_guest_base_seg(void)
1122 44b37ace Richard Henderson
{
1123 44b37ace Richard Henderson
    if (arch_prctl(ARCH_SET_GS, GUEST_BASE) == 0) {
1124 44b37ace Richard Henderson
        guest_base_flags = P_GS;
1125 44b37ace Richard Henderson
    }
1126 44b37ace Richard Henderson
}
1127 44b37ace Richard Henderson
#else
1128 44b37ace Richard Henderson
# define guest_base_flags 0
1129 44b37ace Richard Henderson
static inline void setup_guest_base_seg(void) { }
1130 44b37ace Richard Henderson
#endif /* SOFTMMU */
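/* Added note summarizing the two cases above: for 64-bit Linux user-mode, a
   successful arch_prctl(ARCH_SET_GS, GUEST_BASE) lets qemu_ld/st emit
   %gs-prefixed accesses (P_GS), so the guest base is added by the segment
   hardware instead of by an explicit add; everywhere else guest_base_flags
   stays 0 and the callers fold the base into the effective address.  */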
1131 c896fe29 bellard
1132 be5a4eb7 Richard Henderson
static void tcg_out_qemu_ld_direct(TCGContext *s, int datalo, int datahi,
1133 44b37ace Richard Henderson
                                   int base, tcg_target_long ofs, int seg,
1134 44b37ace Richard Henderson
                                   int sizeop)
1135 be5a4eb7 Richard Henderson
{
1136 be5a4eb7 Richard Henderson
#ifdef TARGET_WORDS_BIGENDIAN
1137 be5a4eb7 Richard Henderson
    const int bswap = 1;
1138 be5a4eb7 Richard Henderson
#else
1139 be5a4eb7 Richard Henderson
    const int bswap = 0;
1140 379f6698 Paul Brook
#endif
1141 be5a4eb7 Richard Henderson
    switch (sizeop) {
1142 be5a4eb7 Richard Henderson
    case 0:
1143 44b37ace Richard Henderson
        tcg_out_modrm_offset(s, OPC_MOVZBL + seg, datalo, base, ofs);
1144 be5a4eb7 Richard Henderson
        break;
1145 be5a4eb7 Richard Henderson
    case 0 | 4:
1146 44b37ace Richard Henderson
        tcg_out_modrm_offset(s, OPC_MOVSBL + P_REXW + seg, datalo, base, ofs);
1147 be5a4eb7 Richard Henderson
        break;
1148 be5a4eb7 Richard Henderson
    case 1:
1149 44b37ace Richard Henderson
        tcg_out_modrm_offset(s, OPC_MOVZWL + seg, datalo, base, ofs);
1150 be5a4eb7 Richard Henderson
        if (bswap) {
1151 be5a4eb7 Richard Henderson
            tcg_out_rolw_8(s, datalo);
1152 be5a4eb7 Richard Henderson
        }
1153 be5a4eb7 Richard Henderson
        break;
1154 be5a4eb7 Richard Henderson
    case 1 | 4:
1155 be5a4eb7 Richard Henderson
        if (bswap) {
1156 44b37ace Richard Henderson
            tcg_out_modrm_offset(s, OPC_MOVZWL + seg, datalo, base, ofs);
1157 be5a4eb7 Richard Henderson
            tcg_out_rolw_8(s, datalo);
1158 5d8a4f8f Richard Henderson
            tcg_out_modrm(s, OPC_MOVSWL + P_REXW, datalo, datalo);
1159 5d8a4f8f Richard Henderson
        } else {
1160 44b37ace Richard Henderson
            tcg_out_modrm_offset(s, OPC_MOVSWL + P_REXW + seg,
1161 44b37ace Richard Henderson
                                 datalo, base, ofs);
1162 be5a4eb7 Richard Henderson
        }
1163 be5a4eb7 Richard Henderson
        break;
1164 be5a4eb7 Richard Henderson
    case 2:
1165 44b37ace Richard Henderson
        tcg_out_modrm_offset(s, OPC_MOVL_GvEv + seg, datalo, base, ofs);
1166 be5a4eb7 Richard Henderson
        if (bswap) {
1167 be5a4eb7 Richard Henderson
            tcg_out_bswap32(s, datalo);
1168 be5a4eb7 Richard Henderson
        }
1169 be5a4eb7 Richard Henderson
        break;
1170 5d8a4f8f Richard Henderson
#if TCG_TARGET_REG_BITS == 64
1171 5d8a4f8f Richard Henderson
    case 2 | 4:
1172 be5a4eb7 Richard Henderson
        if (bswap) {
1173 44b37ace Richard Henderson
            tcg_out_modrm_offset(s, OPC_MOVL_GvEv + seg, datalo, base, ofs);
1174 5d8a4f8f Richard Henderson
            tcg_out_bswap32(s, datalo);
1175 5d8a4f8f Richard Henderson
            tcg_out_ext32s(s, datalo, datalo);
1176 be5a4eb7 Richard Henderson
        } else {
1177 44b37ace Richard Henderson
            tcg_out_modrm_offset(s, OPC_MOVSLQ + seg, datalo, base, ofs);
1178 be5a4eb7 Richard Henderson
        }
1179 5d8a4f8f Richard Henderson
        break;
1180 5d8a4f8f Richard Henderson
#endif
1181 5d8a4f8f Richard Henderson
    case 3:
1182 5d8a4f8f Richard Henderson
        if (TCG_TARGET_REG_BITS == 64) {
1183 44b37ace Richard Henderson
            tcg_out_modrm_offset(s, OPC_MOVL_GvEv + P_REXW + seg,
1184 44b37ace Richard Henderson
                                 datalo, base, ofs);
1185 5d8a4f8f Richard Henderson
            if (bswap) {
1186 5d8a4f8f Richard Henderson
                tcg_out_bswap64(s, datalo);
1187 5d8a4f8f Richard Henderson
            }
1188 5d8a4f8f Richard Henderson
        } else {
1189 5d8a4f8f Richard Henderson
            if (bswap) {
1190 5d8a4f8f Richard Henderson
                int t = datalo;
1191 5d8a4f8f Richard Henderson
                datalo = datahi;
1192 5d8a4f8f Richard Henderson
                datahi = t;
1193 5d8a4f8f Richard Henderson
            }
1194 5d8a4f8f Richard Henderson
            if (base != datalo) {
1195 44b37ace Richard Henderson
                tcg_out_modrm_offset(s, OPC_MOVL_GvEv + seg,
1196 44b37ace Richard Henderson
                                     datalo, base, ofs);
1197 44b37ace Richard Henderson
                tcg_out_modrm_offset(s, OPC_MOVL_GvEv + seg,
1198 44b37ace Richard Henderson
                                     datahi, base, ofs + 4);
1199 5d8a4f8f Richard Henderson
            } else {
1200 44b37ace Richard Henderson
                tcg_out_modrm_offset(s, OPC_MOVL_GvEv + seg,
1201 44b37ace Richard Henderson
                                     datahi, base, ofs + 4);
1202 44b37ace Richard Henderson
                tcg_out_modrm_offset(s, OPC_MOVL_GvEv + seg,
1203 44b37ace Richard Henderson
                                     datalo, base, ofs);
1204 5d8a4f8f Richard Henderson
            }
1205 5d8a4f8f Richard Henderson
            if (bswap) {
1206 5d8a4f8f Richard Henderson
                tcg_out_bswap32(s, datalo);
1207 5d8a4f8f Richard Henderson
                tcg_out_bswap32(s, datahi);
1208 5d8a4f8f Richard Henderson
            }
1209 be5a4eb7 Richard Henderson
        }
1210 be5a4eb7 Richard Henderson
        break;
1211 be5a4eb7 Richard Henderson
    default:
1212 be5a4eb7 Richard Henderson
        tcg_abort();
1213 be5a4eb7 Richard Henderson
    }
1214 be5a4eb7 Richard Henderson
}
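/* Added note on the sizeop encoding used by tcg_out_qemu_ld_direct above and
   tcg_out_qemu_st_direct below: bits 1:0 give log2 of the access size
   (0 = 8-bit ... 3 = 64-bit) and bit 2 ("| 4") requests sign extension of
   the loaded value; e.g. sizeop 1|4 is a sign-extending 16-bit load.  */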
1215 379f6698 Paul Brook
1216 c896fe29 bellard
/* XXX: qemu_ld and qemu_st could be modified to clobber only EDX and
1217 c896fe29 bellard
   EAX. It will be useful once fixed-register globals are less
1218 c896fe29 bellard
   common. */
1219 c896fe29 bellard
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
1220 c896fe29 bellard
                            int opc)
1221 c896fe29 bellard
{
1222 1a6dc1e4 Richard Henderson
    int data_reg, data_reg2 = 0;
1223 8516a044 Richard Henderson
    int addrlo_idx;
1224 c896fe29 bellard
#if defined(CONFIG_SOFTMMU)
1225 6a18ae2d Blue Swirl
    int mem_index, s_bits;
1226 b76f0d8c Yeongkyoon Lee
    uint8_t *label_ptr[2];
1227 c896fe29 bellard
#endif
1228 c896fe29 bellard
1229 8516a044 Richard Henderson
    data_reg = args[0];
1230 8516a044 Richard Henderson
    addrlo_idx = 1;
1231 5d8a4f8f Richard Henderson
    if (TCG_TARGET_REG_BITS == 32 && opc == 3) {
1232 8516a044 Richard Henderson
        data_reg2 = args[1];
1233 8516a044 Richard Henderson
        addrlo_idx = 2;
1234 1a6dc1e4 Richard Henderson
    }
1235 c896fe29 bellard
1236 c896fe29 bellard
#if defined(CONFIG_SOFTMMU)
1237 5d8a4f8f Richard Henderson
    mem_index = args[addrlo_idx + 1 + (TARGET_LONG_BITS > TCG_TARGET_REG_BITS)];
1238 8516a044 Richard Henderson
    s_bits = opc & 3;
1239 1a6dc1e4 Richard Henderson
1240 8516a044 Richard Henderson
    tcg_out_tlb_load(s, addrlo_idx, mem_index, s_bits, args,
1241 8516a044 Richard Henderson
                     label_ptr, offsetof(CPUTLBEntry, addr_read));
1242 1a6dc1e4 Richard Henderson
1243 1a6dc1e4 Richard Henderson
    /* TLB Hit.  */
1244 44b37ace Richard Henderson
    tcg_out_qemu_ld_direct(s, data_reg, data_reg2, TCG_REG_L1, 0, 0, opc);
1245 c896fe29 bellard
1246 b76f0d8c Yeongkyoon Lee
    /* Record the current context of a load into ldst label */
1247 b76f0d8c Yeongkyoon Lee
    add_qemu_ldst_label(s,
1248 b76f0d8c Yeongkyoon Lee
                        1,
1249 b76f0d8c Yeongkyoon Lee
                        opc,
1250 b76f0d8c Yeongkyoon Lee
                        data_reg,
1251 b76f0d8c Yeongkyoon Lee
                        data_reg2,
1252 b76f0d8c Yeongkyoon Lee
                        args[addrlo_idx],
1253 b76f0d8c Yeongkyoon Lee
                        args[addrlo_idx + 1],
1254 b76f0d8c Yeongkyoon Lee
                        mem_index,
1255 b76f0d8c Yeongkyoon Lee
                        s->code_ptr,
1256 b76f0d8c Yeongkyoon Lee
                        label_ptr);
1257 c896fe29 bellard
#else
1258 5d8a4f8f Richard Henderson
    {
1259 5d8a4f8f Richard Henderson
        int32_t offset = GUEST_BASE;
1260 5d8a4f8f Richard Henderson
        int base = args[addrlo_idx];
1261 44b37ace Richard Henderson
        int seg = 0;
1262 44b37ace Richard Henderson
1263 44b37ace Richard Henderson
        /* ??? We assume all operations have left us with register contents
1264 44b37ace Richard Henderson
           that are zero extended.  So far this appears to be true.  If we
1265 44b37ace Richard Henderson
           want to enforce this, we can either do an explicit zero-extension
1266 44b37ace Richard Henderson
           here, or (if GUEST_BASE == 0, or a segment register is in use)
1267 44b37ace Richard Henderson
           use the ADDR32 prefix.  For now, do nothing.  */
1268 44b37ace Richard Henderson
        if (GUEST_BASE && guest_base_flags) {
1269 44b37ace Richard Henderson
            seg = guest_base_flags;
1270 44b37ace Richard Henderson
            offset = 0;
1271 44b37ace Richard Henderson
        } else if (TCG_TARGET_REG_BITS == 64 && offset != GUEST_BASE) {
1272 44b37ace Richard Henderson
            tcg_out_movi(s, TCG_TYPE_I64, TCG_REG_L1, GUEST_BASE);
1273 44b37ace Richard Henderson
            tgen_arithr(s, ARITH_ADD + P_REXW, TCG_REG_L1, base);
1274 44b37ace Richard Henderson
            base = TCG_REG_L1;
1275 44b37ace Richard Henderson
            offset = 0;
1276 5d8a4f8f Richard Henderson
        }
1277 5d8a4f8f Richard Henderson
1278 44b37ace Richard Henderson
        tcg_out_qemu_ld_direct(s, data_reg, data_reg2, base, offset, seg, opc);
1279 5d8a4f8f Richard Henderson
    }
1280 c896fe29 bellard
#endif
1281 be5a4eb7 Richard Henderson
}
1282 c896fe29 bellard
1283 be5a4eb7 Richard Henderson
static void tcg_out_qemu_st_direct(TCGContext *s, int datalo, int datahi,
1284 44b37ace Richard Henderson
                                   int base, tcg_target_long ofs, int seg,
1285 44b37ace Richard Henderson
                                   int sizeop)
1286 be5a4eb7 Richard Henderson
{
1287 c896fe29 bellard
#ifdef TARGET_WORDS_BIGENDIAN
1288 be5a4eb7 Richard Henderson
    const int bswap = 1;
1289 c896fe29 bellard
#else
1290 be5a4eb7 Richard Henderson
    const int bswap = 0;
1291 c896fe29 bellard
#endif
1292 be5a4eb7 Richard Henderson
    /* ??? Ideally we wouldn't need a scratch register.  For user-only,
1293 be5a4eb7 Richard Henderson
       we could perform the bswap twice to restore the original value
1294 be5a4eb7 Richard Henderson
       instead of moving to the scratch.  But as it is, the L constraint
1295 166792f7 Aurelien Jarno
       means that TCG_REG_L0 is definitely free here.  */
1296 166792f7 Aurelien Jarno
    const int scratch = TCG_REG_L0;
1297 be5a4eb7 Richard Henderson
1298 be5a4eb7 Richard Henderson
    switch (sizeop) {
1299 c896fe29 bellard
    case 0:
1300 44b37ace Richard Henderson
        tcg_out_modrm_offset(s, OPC_MOVB_EvGv + P_REXB_R + seg,
1301 44b37ace Richard Henderson
                             datalo, base, ofs);
1302 c896fe29 bellard
        break;
1303 c896fe29 bellard
    case 1:
1304 c896fe29 bellard
        if (bswap) {
1305 3b6dac34 Richard Henderson
            tcg_out_mov(s, TCG_TYPE_I32, scratch, datalo);
1306 be5a4eb7 Richard Henderson
            tcg_out_rolw_8(s, scratch);
1307 be5a4eb7 Richard Henderson
            datalo = scratch;
1308 c896fe29 bellard
        }
1309 44b37ace Richard Henderson
        tcg_out_modrm_offset(s, OPC_MOVL_EvGv + P_DATA16 + seg,
1310 44b37ace Richard Henderson
                             datalo, base, ofs);
1311 c896fe29 bellard
        break;
1312 c896fe29 bellard
    case 2:
1313 c896fe29 bellard
        if (bswap) {
1314 3b6dac34 Richard Henderson
            tcg_out_mov(s, TCG_TYPE_I32, scratch, datalo);
1315 be5a4eb7 Richard Henderson
            tcg_out_bswap32(s, scratch);
1316 be5a4eb7 Richard Henderson
            datalo = scratch;
1317 c896fe29 bellard
        }
1318 44b37ace Richard Henderson
        tcg_out_modrm_offset(s, OPC_MOVL_EvGv + seg, datalo, base, ofs);
1319 c896fe29 bellard
        break;
1320 c896fe29 bellard
    case 3:
1321 5d8a4f8f Richard Henderson
        if (TCG_TARGET_REG_BITS == 64) {
1322 5d8a4f8f Richard Henderson
            if (bswap) {
1323 5d8a4f8f Richard Henderson
                tcg_out_mov(s, TCG_TYPE_I64, scratch, datalo);
1324 5d8a4f8f Richard Henderson
                tcg_out_bswap64(s, scratch);
1325 5d8a4f8f Richard Henderson
                datalo = scratch;
1326 5d8a4f8f Richard Henderson
            }
1327 44b37ace Richard Henderson
            tcg_out_modrm_offset(s, OPC_MOVL_EvGv + P_REXW + seg,
1328 44b37ace Richard Henderson
                                 datalo, base, ofs);
1329 5d8a4f8f Richard Henderson
        } else if (bswap) {
1330 3b6dac34 Richard Henderson
            tcg_out_mov(s, TCG_TYPE_I32, scratch, datahi);
1331 be5a4eb7 Richard Henderson
            tcg_out_bswap32(s, scratch);
1332 44b37ace Richard Henderson
            tcg_out_modrm_offset(s, OPC_MOVL_EvGv + seg, scratch, base, ofs);
1333 3b6dac34 Richard Henderson
            tcg_out_mov(s, TCG_TYPE_I32, scratch, datalo);
1334 be5a4eb7 Richard Henderson
            tcg_out_bswap32(s, scratch);
1335 44b37ace Richard Henderson
            tcg_out_modrm_offset(s, OPC_MOVL_EvGv + seg, scratch, base, ofs+4);
1336 c896fe29 bellard
        } else {
1337 44b37ace Richard Henderson
            tcg_out_modrm_offset(s, OPC_MOVL_EvGv + seg, datalo, base, ofs);
1338 44b37ace Richard Henderson
            tcg_out_modrm_offset(s, OPC_MOVL_EvGv + seg, datahi, base, ofs+4);
1339 c896fe29 bellard
        }
1340 c896fe29 bellard
        break;
1341 c896fe29 bellard
    default:
1342 c896fe29 bellard
        tcg_abort();
1343 c896fe29 bellard
    }
1344 c896fe29 bellard
}
1345 c896fe29 bellard
1346 c896fe29 bellard
static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
1347 c896fe29 bellard
                            int opc)
1348 c896fe29 bellard
{
1349 1a6dc1e4 Richard Henderson
    int data_reg, data_reg2 = 0;
1350 8516a044 Richard Henderson
    int addrlo_idx;
1351 c896fe29 bellard
#if defined(CONFIG_SOFTMMU)
1352 8516a044 Richard Henderson
    int mem_index, s_bits;
1353 b76f0d8c Yeongkyoon Lee
    uint8_t *label_ptr[2];
1354 c896fe29 bellard
#endif
1355 c896fe29 bellard
1356 8516a044 Richard Henderson
    data_reg = args[0];
1357 8516a044 Richard Henderson
    addrlo_idx = 1;
1358 5d8a4f8f Richard Henderson
    if (TCG_TARGET_REG_BITS == 32 && opc == 3) {
1359 8516a044 Richard Henderson
        data_reg2 = args[1];
1360 8516a044 Richard Henderson
        addrlo_idx = 2;
1361 1a6dc1e4 Richard Henderson
    }
1362 c896fe29 bellard
1363 c896fe29 bellard
#if defined(CONFIG_SOFTMMU)
1364 5d8a4f8f Richard Henderson
    mem_index = args[addrlo_idx + 1 + (TARGET_LONG_BITS > TCG_TARGET_REG_BITS)];
1365 8516a044 Richard Henderson
    s_bits = opc;
1366 1a6dc1e4 Richard Henderson
1367 8516a044 Richard Henderson
    tcg_out_tlb_load(s, addrlo_idx, mem_index, s_bits, args,
1368 8516a044 Richard Henderson
                     label_ptr, offsetof(CPUTLBEntry, addr_write));
1369 1a6dc1e4 Richard Henderson
1370 1a6dc1e4 Richard Henderson
    /* TLB Hit.  */
1371 44b37ace Richard Henderson
    tcg_out_qemu_st_direct(s, data_reg, data_reg2, TCG_REG_L1, 0, 0, opc);
1372 c896fe29 bellard
1373 b76f0d8c Yeongkyoon Lee
    /* Record the current context of a store into ldst label */
1374 b76f0d8c Yeongkyoon Lee
    add_qemu_ldst_label(s,
1375 b76f0d8c Yeongkyoon Lee
                        0,
1376 b76f0d8c Yeongkyoon Lee
                        opc,
1377 b76f0d8c Yeongkyoon Lee
                        data_reg,
1378 b76f0d8c Yeongkyoon Lee
                        data_reg2,
1379 b76f0d8c Yeongkyoon Lee
                        args[addrlo_idx],
1380 b76f0d8c Yeongkyoon Lee
                        args[addrlo_idx + 1],
1381 b76f0d8c Yeongkyoon Lee
                        mem_index,
1382 b76f0d8c Yeongkyoon Lee
                        s->code_ptr,
1383 b76f0d8c Yeongkyoon Lee
                        label_ptr);
1384 b76f0d8c Yeongkyoon Lee
#else
1385 b76f0d8c Yeongkyoon Lee
    {
1386 b76f0d8c Yeongkyoon Lee
        int32_t offset = GUEST_BASE;
1387 b76f0d8c Yeongkyoon Lee
        int base = args[addrlo_idx];
1388 b76f0d8c Yeongkyoon Lee
        int seg = 0;
1389 b76f0d8c Yeongkyoon Lee
1390 b76f0d8c Yeongkyoon Lee
        /* ??? We assume all operations have left us with register contents
1391 b76f0d8c Yeongkyoon Lee
           that are zero extended.  So far this appears to be true.  If we
1392 b76f0d8c Yeongkyoon Lee
           want to enforce this, we can either do an explicit zero-extension
1393 b76f0d8c Yeongkyoon Lee
           here, or (if GUEST_BASE == 0, or a segment register is in use)
1394 b76f0d8c Yeongkyoon Lee
           use the ADDR32 prefix.  For now, do nothing.  */
1395 b76f0d8c Yeongkyoon Lee
        if (GUEST_BASE && guest_base_flags) {
1396 b76f0d8c Yeongkyoon Lee
            seg = guest_base_flags;
1397 b76f0d8c Yeongkyoon Lee
            offset = 0;
1398 b76f0d8c Yeongkyoon Lee
        } else if (TCG_TARGET_REG_BITS == 64 && offset != GUEST_BASE) {
1399 b76f0d8c Yeongkyoon Lee
            tcg_out_movi(s, TCG_TYPE_I64, TCG_REG_L1, GUEST_BASE);
1400 b76f0d8c Yeongkyoon Lee
            tgen_arithr(s, ARITH_ADD + P_REXW, TCG_REG_L1, base);
1401 b76f0d8c Yeongkyoon Lee
            base = TCG_REG_L1;
1402 b76f0d8c Yeongkyoon Lee
            offset = 0;
1403 b76f0d8c Yeongkyoon Lee
        }
1404 b76f0d8c Yeongkyoon Lee
1405 b76f0d8c Yeongkyoon Lee
        tcg_out_qemu_st_direct(s, data_reg, data_reg2, base, offset, seg, opc);
1406 b76f0d8c Yeongkyoon Lee
    }
1407 b76f0d8c Yeongkyoon Lee
#endif
1408 b76f0d8c Yeongkyoon Lee
}
1409 b76f0d8c Yeongkyoon Lee
1410 b76f0d8c Yeongkyoon Lee
#if defined(CONFIG_SOFTMMU)
1411 b76f0d8c Yeongkyoon Lee
/*
1412 b76f0d8c Yeongkyoon Lee
 * Record the context of a call to the out-of-line helper code for the slow path
1413 b76f0d8c Yeongkyoon Lee
 * for a load or store, so that we can later generate the correct helper code
1414 b76f0d8c Yeongkyoon Lee
 */
1415 b76f0d8c Yeongkyoon Lee
static void add_qemu_ldst_label(TCGContext *s,
1416 b76f0d8c Yeongkyoon Lee
                                int is_ld,
1417 b76f0d8c Yeongkyoon Lee
                                int opc,
1418 b76f0d8c Yeongkyoon Lee
                                int data_reg,
1419 b76f0d8c Yeongkyoon Lee
                                int data_reg2,
1420 b76f0d8c Yeongkyoon Lee
                                int addrlo_reg,
1421 b76f0d8c Yeongkyoon Lee
                                int addrhi_reg,
1422 b76f0d8c Yeongkyoon Lee
                                int mem_index,
1423 b76f0d8c Yeongkyoon Lee
                                uint8_t *raddr,
1424 b76f0d8c Yeongkyoon Lee
                                uint8_t **label_ptr)
1425 b76f0d8c Yeongkyoon Lee
{
1426 b76f0d8c Yeongkyoon Lee
    int idx;
1427 b76f0d8c Yeongkyoon Lee
    TCGLabelQemuLdst *label;
1428 b76f0d8c Yeongkyoon Lee
1429 b76f0d8c Yeongkyoon Lee
    if (s->nb_qemu_ldst_labels >= TCG_MAX_QEMU_LDST) {
1430 b76f0d8c Yeongkyoon Lee
        tcg_abort();
1431 b76f0d8c Yeongkyoon Lee
    }
1432 b76f0d8c Yeongkyoon Lee
1433 b76f0d8c Yeongkyoon Lee
    idx = s->nb_qemu_ldst_labels++;
1434 b76f0d8c Yeongkyoon Lee
    label = (TCGLabelQemuLdst *)&s->qemu_ldst_labels[idx];
1435 b76f0d8c Yeongkyoon Lee
    label->is_ld = is_ld;
1436 b76f0d8c Yeongkyoon Lee
    label->opc = opc;
1437 b76f0d8c Yeongkyoon Lee
    label->datalo_reg = data_reg;
1438 b76f0d8c Yeongkyoon Lee
    label->datahi_reg = data_reg2;
1439 b76f0d8c Yeongkyoon Lee
    label->addrlo_reg = addrlo_reg;
1440 b76f0d8c Yeongkyoon Lee
    label->addrhi_reg = addrhi_reg;
1441 b76f0d8c Yeongkyoon Lee
    label->mem_index = mem_index;
1442 b76f0d8c Yeongkyoon Lee
    label->raddr = raddr;
1443 b76f0d8c Yeongkyoon Lee
    label->label_ptr[0] = label_ptr[0];
1444 b76f0d8c Yeongkyoon Lee
    if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
1445 b76f0d8c Yeongkyoon Lee
        label->label_ptr[1] = label_ptr[1];
1446 b76f0d8c Yeongkyoon Lee
    }
1447 b76f0d8c Yeongkyoon Lee
}
1448 b76f0d8c Yeongkyoon Lee
1449 b76f0d8c Yeongkyoon Lee
/*
1450 b76f0d8c Yeongkyoon Lee
 * Generate code for the slow path for a load at the end of the block
1451 b76f0d8c Yeongkyoon Lee
 */
1452 b76f0d8c Yeongkyoon Lee
static void tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *label)
1453 b76f0d8c Yeongkyoon Lee
{
1454 b76f0d8c Yeongkyoon Lee
    int s_bits;
1455 b76f0d8c Yeongkyoon Lee
    int opc = label->opc;
1456 b76f0d8c Yeongkyoon Lee
    int mem_index = label->mem_index;
1457 b76f0d8c Yeongkyoon Lee
#if TCG_TARGET_REG_BITS == 32
1458 b76f0d8c Yeongkyoon Lee
    int stack_adjust;
1459 b76f0d8c Yeongkyoon Lee
    int addrlo_reg = label->addrlo_reg;
1460 b76f0d8c Yeongkyoon Lee
    int addrhi_reg = label->addrhi_reg;
1461 b76f0d8c Yeongkyoon Lee
#endif
1462 b76f0d8c Yeongkyoon Lee
    int data_reg = label->datalo_reg;
1463 b76f0d8c Yeongkyoon Lee
    int data_reg2 = label->datahi_reg;
1464 b76f0d8c Yeongkyoon Lee
    uint8_t *raddr = label->raddr;
1465 b76f0d8c Yeongkyoon Lee
    uint8_t **label_ptr = &label->label_ptr[0];
1466 b76f0d8c Yeongkyoon Lee
1467 b76f0d8c Yeongkyoon Lee
    s_bits = opc & 3;
1468 b76f0d8c Yeongkyoon Lee
1469 b76f0d8c Yeongkyoon Lee
    /* resolve label address */
1470 b76f0d8c Yeongkyoon Lee
    *(uint32_t *)label_ptr[0] = (uint32_t)(s->code_ptr - label_ptr[0] - 4);
1471 b76f0d8c Yeongkyoon Lee
    if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
1472 b76f0d8c Yeongkyoon Lee
        *(uint32_t *)label_ptr[1] = (uint32_t)(s->code_ptr - label_ptr[1] - 4);
1473 b76f0d8c Yeongkyoon Lee
    }
1474 b76f0d8c Yeongkyoon Lee
1475 b76f0d8c Yeongkyoon Lee
#if TCG_TARGET_REG_BITS == 32
1476 b76f0d8c Yeongkyoon Lee
    tcg_out_pushi(s, mem_index);
1477 b76f0d8c Yeongkyoon Lee
    stack_adjust = 4;
1478 b76f0d8c Yeongkyoon Lee
    if (TARGET_LONG_BITS == 64) {
1479 b76f0d8c Yeongkyoon Lee
        tcg_out_push(s, addrhi_reg);
1480 b76f0d8c Yeongkyoon Lee
        stack_adjust += 4;
1481 b76f0d8c Yeongkyoon Lee
    }
1482 b76f0d8c Yeongkyoon Lee
    tcg_out_push(s, addrlo_reg);
1483 b76f0d8c Yeongkyoon Lee
    stack_adjust += 4;
1484 b76f0d8c Yeongkyoon Lee
    tcg_out_push(s, TCG_AREG0);
1485 b76f0d8c Yeongkyoon Lee
    stack_adjust += 4;
1486 b76f0d8c Yeongkyoon Lee
#else
1487 b76f0d8c Yeongkyoon Lee
    tcg_out_mov(s, TCG_TYPE_I64, tcg_target_call_iarg_regs[0], TCG_AREG0);
1488 b76f0d8c Yeongkyoon Lee
    /* The second argument is already loaded with addrlo.  */
1489 b76f0d8c Yeongkyoon Lee
    tcg_out_movi(s, TCG_TYPE_I32, tcg_target_call_iarg_regs[2], mem_index);
1490 b76f0d8c Yeongkyoon Lee
#endif
1491 b76f0d8c Yeongkyoon Lee
1492 b76f0d8c Yeongkyoon Lee
    /* Code generation for the qemu_ld/st slow path, which calls the MMU helper:
1493 b76f0d8c Yeongkyoon Lee

1494 b76f0d8c Yeongkyoon Lee
       PRE_PROC ...
1495 b76f0d8c Yeongkyoon Lee
       call MMU helper
1496 b76f0d8c Yeongkyoon Lee
       jmp POST_PROC (2b) : short forward jump <- GETRA()
1497 b76f0d8c Yeongkyoon Lee
       jmp next_code (5b) : dummy long backward jump which is never executed
1498 b76f0d8c Yeongkyoon Lee
       POST_PROC ... : do post-processing <- GETRA() + 7
1499 b76f0d8c Yeongkyoon Lee
       jmp next_code : jump to the code corresponding to the next IR of qemu_ld/st
1500 b76f0d8c Yeongkyoon Lee
    */
1501 b76f0d8c Yeongkyoon Lee
1502 b76f0d8c Yeongkyoon Lee
    tcg_out_calli(s, (tcg_target_long)qemu_ld_helpers[s_bits]);
1503 b76f0d8c Yeongkyoon Lee
1504 b76f0d8c Yeongkyoon Lee
    /* Jump to post-processing code */
1505 1a6dc1e4 Richard Henderson
    tcg_out8(s, OPC_JMP_short);
1506 b76f0d8c Yeongkyoon Lee
    tcg_out8(s, 5);
1507 b76f0d8c Yeongkyoon Lee
    /* Dummy backward jump holding the fast path's pc for the MMU helpers */
1508 b76f0d8c Yeongkyoon Lee
    tcg_out8(s, OPC_JMP_long);
1509 b76f0d8c Yeongkyoon Lee
    *(int32_t *)s->code_ptr = (int32_t)(raddr - s->code_ptr - 4);
1510 b76f0d8c Yeongkyoon Lee
    s->code_ptr += 4;
1511 78686523 Richard Henderson
1512 b76f0d8c Yeongkyoon Lee
#if TCG_TARGET_REG_BITS == 32
1513 b76f0d8c Yeongkyoon Lee
    if (stack_adjust == (TCG_TARGET_REG_BITS / 8)) {
1514 b76f0d8c Yeongkyoon Lee
        /* Pop and discard.  This is 2 bytes smaller than the add.  */
1515 b76f0d8c Yeongkyoon Lee
        tcg_out_pop(s, TCG_REG_ECX);
1516 b76f0d8c Yeongkyoon Lee
    } else if (stack_adjust != 0) {
1517 b76f0d8c Yeongkyoon Lee
        tcg_out_addi(s, TCG_REG_CALL_STACK, stack_adjust);
1518 b76f0d8c Yeongkyoon Lee
    }
1519 b76f0d8c Yeongkyoon Lee
#endif
1520 1a6dc1e4 Richard Henderson
1521 b76f0d8c Yeongkyoon Lee
    switch(opc) {
1522 b76f0d8c Yeongkyoon Lee
    case 0 | 4:
1523 b76f0d8c Yeongkyoon Lee
        tcg_out_ext8s(s, data_reg, TCG_REG_EAX, P_REXW);
1524 b76f0d8c Yeongkyoon Lee
        break;
1525 b76f0d8c Yeongkyoon Lee
    case 1 | 4:
1526 b76f0d8c Yeongkyoon Lee
        tcg_out_ext16s(s, data_reg, TCG_REG_EAX, P_REXW);
1527 b76f0d8c Yeongkyoon Lee
        break;
1528 b76f0d8c Yeongkyoon Lee
    case 0:
1529 b76f0d8c Yeongkyoon Lee
        tcg_out_ext8u(s, data_reg, TCG_REG_EAX);
1530 b76f0d8c Yeongkyoon Lee
        break;
1531 b76f0d8c Yeongkyoon Lee
    case 1:
1532 b76f0d8c Yeongkyoon Lee
        tcg_out_ext16u(s, data_reg, TCG_REG_EAX);
1533 b76f0d8c Yeongkyoon Lee
        break;
1534 b76f0d8c Yeongkyoon Lee
    case 2:
1535 b76f0d8c Yeongkyoon Lee
        tcg_out_mov(s, TCG_TYPE_I32, data_reg, TCG_REG_EAX);
1536 b76f0d8c Yeongkyoon Lee
        break;
1537 b76f0d8c Yeongkyoon Lee
#if TCG_TARGET_REG_BITS == 64
1538 b76f0d8c Yeongkyoon Lee
    case 2 | 4:
1539 b76f0d8c Yeongkyoon Lee
        tcg_out_ext32s(s, data_reg, TCG_REG_EAX);
1540 b76f0d8c Yeongkyoon Lee
        break;
1541 b76f0d8c Yeongkyoon Lee
#endif
1542 b76f0d8c Yeongkyoon Lee
    case 3:
1543 b76f0d8c Yeongkyoon Lee
        if (TCG_TARGET_REG_BITS == 64) {
1544 b76f0d8c Yeongkyoon Lee
            tcg_out_mov(s, TCG_TYPE_I64, data_reg, TCG_REG_RAX);
1545 b76f0d8c Yeongkyoon Lee
        } else if (data_reg == TCG_REG_EDX) {
1546 b76f0d8c Yeongkyoon Lee
            /* xchg %edx, %eax */
1547 b76f0d8c Yeongkyoon Lee
            tcg_out_opc(s, OPC_XCHG_ax_r32 + TCG_REG_EDX, 0, 0, 0);
1548 b76f0d8c Yeongkyoon Lee
            tcg_out_mov(s, TCG_TYPE_I32, data_reg2, TCG_REG_EAX);
1549 b76f0d8c Yeongkyoon Lee
        } else {
1550 b76f0d8c Yeongkyoon Lee
            tcg_out_mov(s, TCG_TYPE_I32, data_reg, TCG_REG_EAX);
1551 b76f0d8c Yeongkyoon Lee
            tcg_out_mov(s, TCG_TYPE_I32, data_reg2, TCG_REG_EDX);
1552 b76f0d8c Yeongkyoon Lee
        }
1553 b76f0d8c Yeongkyoon Lee
        break;
1554 b76f0d8c Yeongkyoon Lee
    default:
1555 b76f0d8c Yeongkyoon Lee
        tcg_abort();
1556 b76f0d8c Yeongkyoon Lee
    }
1557 b76f0d8c Yeongkyoon Lee
1558 b76f0d8c Yeongkyoon Lee
    /* Jump to the code corresponding to the next IR of qemu_ld */
1559 b76f0d8c Yeongkyoon Lee
    tcg_out_jmp(s, (tcg_target_long)raddr);
1560 b76f0d8c Yeongkyoon Lee
}
1561 b76f0d8c Yeongkyoon Lee
1562 b76f0d8c Yeongkyoon Lee
/*
1563 b76f0d8c Yeongkyoon Lee
 * Generate code for the slow path for a store at the end of the block
1564 b76f0d8c Yeongkyoon Lee
 */
1565 b76f0d8c Yeongkyoon Lee
static void tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *label)
1566 b76f0d8c Yeongkyoon Lee
{
1567 b76f0d8c Yeongkyoon Lee
    int s_bits;
1568 b76f0d8c Yeongkyoon Lee
    int stack_adjust;
1569 b76f0d8c Yeongkyoon Lee
    int opc = label->opc;
1570 b76f0d8c Yeongkyoon Lee
    int mem_index = label->mem_index;
1571 b76f0d8c Yeongkyoon Lee
    int data_reg = label->datalo_reg;
1572 b76f0d8c Yeongkyoon Lee
#if TCG_TARGET_REG_BITS == 32
1573 b76f0d8c Yeongkyoon Lee
    int data_reg2 = label->datahi_reg;
1574 b76f0d8c Yeongkyoon Lee
    int addrlo_reg = label->addrlo_reg;
1575 b76f0d8c Yeongkyoon Lee
    int addrhi_reg = label->addrhi_reg;
1576 b76f0d8c Yeongkyoon Lee
#endif
1577 b76f0d8c Yeongkyoon Lee
    uint8_t *raddr = label->raddr;
1578 b76f0d8c Yeongkyoon Lee
    uint8_t **label_ptr = &label->label_ptr[0];
1579 b76f0d8c Yeongkyoon Lee
1580 b76f0d8c Yeongkyoon Lee
    s_bits = opc & 3;
1581 b76f0d8c Yeongkyoon Lee
1582 b76f0d8c Yeongkyoon Lee
    /* resolve label address */
1583 b76f0d8c Yeongkyoon Lee
    *(uint32_t *)label_ptr[0] = (uint32_t)(s->code_ptr - label_ptr[0] - 4);
1584 5d8a4f8f Richard Henderson
    if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
1585 b76f0d8c Yeongkyoon Lee
        *(uint32_t *)label_ptr[1] = (uint32_t)(s->code_ptr - label_ptr[1] - 4);
1586 1a6dc1e4 Richard Henderson
    }
1587 c896fe29 bellard
1588 6a18ae2d Blue Swirl
#if TCG_TARGET_REG_BITS == 32
1589 6a18ae2d Blue Swirl
    tcg_out_pushi(s, mem_index);
1590 6a18ae2d Blue Swirl
    stack_adjust = 4;
1591 6a18ae2d Blue Swirl
    if (opc == 3) {
1592 6a18ae2d Blue Swirl
        tcg_out_push(s, data_reg2);
1593 6a18ae2d Blue Swirl
        stack_adjust += 4;
1594 6a18ae2d Blue Swirl
    }
1595 6a18ae2d Blue Swirl
    tcg_out_push(s, data_reg);
1596 6a18ae2d Blue Swirl
    stack_adjust += 4;
1597 6a18ae2d Blue Swirl
    if (TARGET_LONG_BITS == 64) {
1598 b76f0d8c Yeongkyoon Lee
        tcg_out_push(s, addrhi_reg);
1599 6a18ae2d Blue Swirl
        stack_adjust += 4;
1600 c896fe29 bellard
    }
1601 b76f0d8c Yeongkyoon Lee
    tcg_out_push(s, addrlo_reg);
1602 6a18ae2d Blue Swirl
    stack_adjust += 4;
1603 e141ab52 Blue Swirl
    tcg_out_push(s, TCG_AREG0);
1604 e141ab52 Blue Swirl
    stack_adjust += 4;
1605 6a18ae2d Blue Swirl
#else
1606 166792f7 Aurelien Jarno
    tcg_out_mov(s, TCG_TYPE_I64, tcg_target_call_iarg_regs[0], TCG_AREG0);
1607 166792f7 Aurelien Jarno
    /* The second argument is already loaded with addrlo.  */
1608 6a18ae2d Blue Swirl
    tcg_out_mov(s, (opc == 3 ? TCG_TYPE_I64 : TCG_TYPE_I32),
1609 166792f7 Aurelien Jarno
                tcg_target_call_iarg_regs[2], data_reg);
1610 166792f7 Aurelien Jarno
    tcg_out_movi(s, TCG_TYPE_I32, tcg_target_call_iarg_regs[3], mem_index);
1611 6a18ae2d Blue Swirl
    stack_adjust = 0;
1612 e141ab52 Blue Swirl
#endif
1613 aadb21a4 Richard Henderson
1614 b76f0d8c Yeongkyoon Lee
    /* Code generation for the qemu_ld/st slow path, which calls the MMU helper:
1615 b76f0d8c Yeongkyoon Lee

1616 b76f0d8c Yeongkyoon Lee
       PRE_PROC ...
1617 b76f0d8c Yeongkyoon Lee
       call MMU helper
1618 b76f0d8c Yeongkyoon Lee
       jmp POST_PROC (2b) : short forward jump <- GETRA()
1619 b76f0d8c Yeongkyoon Lee
       jmp next_code (5b) : dummy long backward jump which is never executed
1620 b76f0d8c Yeongkyoon Lee
       POST_PROC ... : do post-processing <- GETRA() + 7
1621 b76f0d8c Yeongkyoon Lee
       jmp next_code : jump to the code corresponding to the next IR of qemu_ld/st
1622 b76f0d8c Yeongkyoon Lee
    */
1623 b76f0d8c Yeongkyoon Lee
1624 aadb21a4 Richard Henderson
    tcg_out_calli(s, (tcg_target_long)qemu_st_helpers[s_bits]);
1625 aadb21a4 Richard Henderson
1626 b76f0d8c Yeongkyoon Lee
    /* Jump to post-processing code */
1627 b76f0d8c Yeongkyoon Lee
    tcg_out8(s, OPC_JMP_short);
1628 b76f0d8c Yeongkyoon Lee
    tcg_out8(s, 5);
1629 b76f0d8c Yeongkyoon Lee
    /* Dummy backward jump holding the fast path's pc for the MMU helpers */
1630 b76f0d8c Yeongkyoon Lee
    tcg_out8(s, OPC_JMP_long);
1631 b76f0d8c Yeongkyoon Lee
    *(int32_t *)s->code_ptr = (int32_t)(raddr - s->code_ptr - 4);
1632 b76f0d8c Yeongkyoon Lee
    s->code_ptr += 4;
1633 b76f0d8c Yeongkyoon Lee
1634 5d8a4f8f Richard Henderson
    if (stack_adjust == (TCG_TARGET_REG_BITS / 8)) {
1635 aadb21a4 Richard Henderson
        /* Pop and discard.  This is 2 bytes smaller than the add.  */
1636 aadb21a4 Richard Henderson
        tcg_out_pop(s, TCG_REG_ECX);
1637 aadb21a4 Richard Henderson
    } else if (stack_adjust != 0) {
1638 e83c80f7 Blue Swirl
        tcg_out_addi(s, TCG_REG_CALL_STACK, stack_adjust);
1639 aadb21a4 Richard Henderson
    }
1640 aadb21a4 Richard Henderson
1641 b76f0d8c Yeongkyoon Lee
    /* Jump to the code corresponding to the next IR of qemu_st */
1642 b76f0d8c Yeongkyoon Lee
    tcg_out_jmp(s, (tcg_target_long)raddr);
1643 b76f0d8c Yeongkyoon Lee
}
1644 44b37ace Richard Henderson
1645 b76f0d8c Yeongkyoon Lee
/*
1646 b76f0d8c Yeongkyoon Lee
 * Generate TB finalization at the end of the block
1647 b76f0d8c Yeongkyoon Lee
 */
1648 b76f0d8c Yeongkyoon Lee
void tcg_out_tb_finalize(TCGContext *s)
1649 b76f0d8c Yeongkyoon Lee
{
1650 b76f0d8c Yeongkyoon Lee
    int i;
1651 b76f0d8c Yeongkyoon Lee
    TCGLabelQemuLdst *label;
1652 b76f0d8c Yeongkyoon Lee
1653 b76f0d8c Yeongkyoon Lee
    /* qemu_ld/st slow paths */
1654 b76f0d8c Yeongkyoon Lee
    for (i = 0; i < s->nb_qemu_ldst_labels; i++) {
1655 b76f0d8c Yeongkyoon Lee
        label = (TCGLabelQemuLdst *)&s->qemu_ldst_labels[i];
1656 b76f0d8c Yeongkyoon Lee
        if (label->is_ld) {
1657 b76f0d8c Yeongkyoon Lee
            tcg_out_qemu_ld_slow_path(s, label);
1658 b76f0d8c Yeongkyoon Lee
        } else {
1659 b76f0d8c Yeongkyoon Lee
            tcg_out_qemu_st_slow_path(s, label);
1660 5d8a4f8f Richard Henderson
        }
1661 5d8a4f8f Richard Henderson
    }
1662 c896fe29 bellard
}
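/* Usage sketch (an assumption, not stated in this file): the common TCG code
   is expected to call tcg_out_tb_finalize() once per translation block after
   the main opcode loop, so the slow paths recorded by add_qemu_ldst_label()
   are emitted after the TB's fast-path code.  */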
1663 b76f0d8c Yeongkyoon Lee
#endif  /* CONFIG_SOFTMMU */
1664 c896fe29 bellard
1665 a9751609 Richard Henderson
static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
1666 c896fe29 bellard
                              const TCGArg *args, const int *const_args)
1667 c896fe29 bellard
{
1668 5d8a4f8f Richard Henderson
    int c, rexw = 0;
1669 5d8a4f8f Richard Henderson
1670 5d8a4f8f Richard Henderson
#if TCG_TARGET_REG_BITS == 64
1671 5d8a4f8f Richard Henderson
# define OP_32_64(x) \
1672 5d8a4f8f Richard Henderson
        case glue(glue(INDEX_op_, x), _i64): \
1673 5d8a4f8f Richard Henderson
            rexw = P_REXW; /* FALLTHRU */    \
1674 5d8a4f8f Richard Henderson
        case glue(glue(INDEX_op_, x), _i32)
1675 5d8a4f8f Richard Henderson
#else
1676 5d8a4f8f Richard Henderson
# define OP_32_64(x) \
1677 5d8a4f8f Richard Henderson
        case glue(glue(INDEX_op_, x), _i32)
1678 5d8a4f8f Richard Henderson
#endif
1679 78686523 Richard Henderson
1680 c896fe29 bellard
    switch(opc) {
1681 c896fe29 bellard
    case INDEX_op_exit_tb:
1682 5d8a4f8f Richard Henderson
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_EAX, args[0]);
1683 5d8a4f8f Richard Henderson
        tcg_out_jmp(s, (tcg_target_long) tb_ret_addr);
1684 c896fe29 bellard
        break;
1685 c896fe29 bellard
    case INDEX_op_goto_tb:
1686 c896fe29 bellard
        if (s->tb_jmp_offset) {
1687 c896fe29 bellard
            /* direct jump method */
1688 da441cff Richard Henderson
            tcg_out8(s, OPC_JMP_long); /* jmp im */
1689 c896fe29 bellard
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
1690 c896fe29 bellard
            tcg_out32(s, 0);
1691 c896fe29 bellard
        } else {
1692 c896fe29 bellard
            /* indirect jump method */
1693 9363dedb Richard Henderson
            tcg_out_modrm_offset(s, OPC_GRP5, EXT5_JMPN_Ev, -1,
1694 c896fe29 bellard
                                 (tcg_target_long)(s->tb_next + args[0]));
1695 c896fe29 bellard
        }
1696 c896fe29 bellard
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
1697 c896fe29 bellard
        break;
1698 c896fe29 bellard
    case INDEX_op_call:
1699 c896fe29 bellard
        if (const_args[0]) {
1700 aadb21a4 Richard Henderson
            tcg_out_calli(s, args[0]);
1701 c896fe29 bellard
        } else {
1702 aadb21a4 Richard Henderson
            /* call *reg */
1703 9363dedb Richard Henderson
            tcg_out_modrm(s, OPC_GRP5, EXT5_CALLN_Ev, args[0]);
1704 c896fe29 bellard
        }
1705 c896fe29 bellard
        break;
1706 c896fe29 bellard
    case INDEX_op_br:
1707 f75b56c1 Richard Henderson
        tcg_out_jxx(s, JCC_JMP, args[0], 0);
1708 c896fe29 bellard
        break;
1709 c896fe29 bellard
    case INDEX_op_movi_i32:
1710 c896fe29 bellard
        tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);
1711 c896fe29 bellard
        break;
1712 5d8a4f8f Richard Henderson
    OP_32_64(ld8u):
1713 5d8a4f8f Richard Henderson
        /* Note that we can ignore REXW for the zero-extend to 64-bit.  */
1714 55e082a7 Richard Henderson
        tcg_out_modrm_offset(s, OPC_MOVZBL, args[0], args[1], args[2]);
1715 c896fe29 bellard
        break;
1716 5d8a4f8f Richard Henderson
    OP_32_64(ld8s):
1717 5d8a4f8f Richard Henderson
        tcg_out_modrm_offset(s, OPC_MOVSBL + rexw, args[0], args[1], args[2]);
1718 c896fe29 bellard
        break;
1719 5d8a4f8f Richard Henderson
    OP_32_64(ld16u):
1720 5d8a4f8f Richard Henderson
        /* Note that we can ignore REXW for the zero-extend to 64-bit.  */
1721 55e082a7 Richard Henderson
        tcg_out_modrm_offset(s, OPC_MOVZWL, args[0], args[1], args[2]);
1722 c896fe29 bellard
        break;
1723 5d8a4f8f Richard Henderson
    OP_32_64(ld16s):
1724 5d8a4f8f Richard Henderson
        tcg_out_modrm_offset(s, OPC_MOVSWL + rexw, args[0], args[1], args[2]);
1725 c896fe29 bellard
        break;
1726 5d8a4f8f Richard Henderson
#if TCG_TARGET_REG_BITS == 64
1727 5d8a4f8f Richard Henderson
    case INDEX_op_ld32u_i64:
1728 5d8a4f8f Richard Henderson
#endif
1729 c896fe29 bellard
    case INDEX_op_ld_i32:
1730 af266089 Richard Henderson
        tcg_out_ld(s, TCG_TYPE_I32, args[0], args[1], args[2]);
1731 c896fe29 bellard
        break;
1732 5d8a4f8f Richard Henderson
1733 5d8a4f8f Richard Henderson
    OP_32_64(st8):
1734 5c2d2a9e Aurelien Jarno
        if (const_args[0]) {
1735 5c2d2a9e Aurelien Jarno
            tcg_out_modrm_offset(s, OPC_MOVB_EvIz,
1736 5c2d2a9e Aurelien Jarno
                                 0, args[1], args[2]);
1737 5c2d2a9e Aurelien Jarno
            tcg_out8(s, args[0]);
1738 5c2d2a9e Aurelien Jarno
        } else {
1739 5c2d2a9e Aurelien Jarno
            tcg_out_modrm_offset(s, OPC_MOVB_EvGv | P_REXB_R,
1740 5c2d2a9e Aurelien Jarno
                                 args[0], args[1], args[2]);
1741 5c2d2a9e Aurelien Jarno
        }
1742 c896fe29 bellard
        break;
1743 5d8a4f8f Richard Henderson
    OP_32_64(st16):
1744 5c2d2a9e Aurelien Jarno
        if (const_args[0]) {
1745 5c2d2a9e Aurelien Jarno
            tcg_out_modrm_offset(s, OPC_MOVL_EvIz | P_DATA16,
1746 5c2d2a9e Aurelien Jarno
                                 0, args[1], args[2]);
1747 5c2d2a9e Aurelien Jarno
            tcg_out16(s, args[0]);
1748 5c2d2a9e Aurelien Jarno
        } else {
1749 5c2d2a9e Aurelien Jarno
            tcg_out_modrm_offset(s, OPC_MOVL_EvGv | P_DATA16,
1750 5c2d2a9e Aurelien Jarno
                                 args[0], args[1], args[2]);
1751 5c2d2a9e Aurelien Jarno
        }
1752 c896fe29 bellard
        break;
1753 5d8a4f8f Richard Henderson
#if TCG_TARGET_REG_BITS == 64
1754 5d8a4f8f Richard Henderson
    case INDEX_op_st32_i64:
1755 5d8a4f8f Richard Henderson
#endif
1756 c896fe29 bellard
    case INDEX_op_st_i32:
1757 5c2d2a9e Aurelien Jarno
        if (const_args[0]) {
1758 5c2d2a9e Aurelien Jarno
            tcg_out_modrm_offset(s, OPC_MOVL_EvIz, 0, args[1], args[2]);
1759 5c2d2a9e Aurelien Jarno
            tcg_out32(s, args[0]);
1760 5c2d2a9e Aurelien Jarno
        } else {
1761 5c2d2a9e Aurelien Jarno
            tcg_out_st(s, TCG_TYPE_I32, args[0], args[1], args[2]);
1762 5c2d2a9e Aurelien Jarno
        }
1763 c896fe29 bellard
        break;
1764 5d8a4f8f Richard Henderson
1765 5d8a4f8f Richard Henderson
    OP_32_64(add):
1766 5d1e4e85 Richard Henderson
        /* For 3-operand addition, use LEA.  */
1767 5d1e4e85 Richard Henderson
        if (args[0] != args[1]) {
1768 5d1e4e85 Richard Henderson
            TCGArg a0 = args[0], a1 = args[1], a2 = args[2], c3 = 0;
1769 5d1e4e85 Richard Henderson
1770 5d1e4e85 Richard Henderson
            if (const_args[2]) {
1771 5d1e4e85 Richard Henderson
                c3 = a2, a2 = -1;
1772 5d1e4e85 Richard Henderson
            } else if (a0 == a2) {
1773 5d1e4e85 Richard Henderson
                /* Watch out for dest = src + dest, since we've removed
1774 5d1e4e85 Richard Henderson
                   the matching constraint on the add.  */
1775 5d8a4f8f Richard Henderson
                tgen_arithr(s, ARITH_ADD + rexw, a0, a1);
1776 5d1e4e85 Richard Henderson
                break;
1777 5d1e4e85 Richard Henderson
            }
1778 5d1e4e85 Richard Henderson
1779 5d8a4f8f Richard Henderson
            tcg_out_modrm_sib_offset(s, OPC_LEA + rexw, a0, a1, a2, 0, c3);
1780 5d1e4e85 Richard Henderson
            break;
1781 5d1e4e85 Richard Henderson
        }
1782 5d1e4e85 Richard Henderson
        c = ARITH_ADD;
1783 5d1e4e85 Richard Henderson
        goto gen_arith;
1784 5d8a4f8f Richard Henderson
    OP_32_64(sub):
1785 c896fe29 bellard
        c = ARITH_SUB;
1786 c896fe29 bellard
        goto gen_arith;
1787 5d8a4f8f Richard Henderson
    OP_32_64(and):
1788 c896fe29 bellard
        c = ARITH_AND;
1789 c896fe29 bellard
        goto gen_arith;
1790 5d8a4f8f Richard Henderson
    OP_32_64(or):
1791 c896fe29 bellard
        c = ARITH_OR;
1792 c896fe29 bellard
        goto gen_arith;
1793 5d8a4f8f Richard Henderson
    OP_32_64(xor):
1794 c896fe29 bellard
        c = ARITH_XOR;
1795 c896fe29 bellard
        goto gen_arith;
1796 c896fe29 bellard
    gen_arith:
1797 c896fe29 bellard
        if (const_args[2]) {
1798 5d8a4f8f Richard Henderson
            tgen_arithi(s, c + rexw, args[0], args[2], 0);
1799 c896fe29 bellard
        } else {
1800 5d8a4f8f Richard Henderson
            tgen_arithr(s, c + rexw, args[0], args[2]);
1801 c896fe29 bellard
        }
1802 c896fe29 bellard
        break;
1803 5d8a4f8f Richard Henderson
1804 5d8a4f8f Richard Henderson
    OP_32_64(mul):
1805 c896fe29 bellard
        if (const_args[2]) {
1806 c896fe29 bellard
            int32_t val;
1807 c896fe29 bellard
            val = args[2];
1808 c896fe29 bellard
            if (val == (int8_t)val) {
1809 5d8a4f8f Richard Henderson
                tcg_out_modrm(s, OPC_IMUL_GvEvIb + rexw, args[0], args[0]);
1810 c896fe29 bellard
                tcg_out8(s, val);
1811 c896fe29 bellard
            } else {
1812 5d8a4f8f Richard Henderson
                tcg_out_modrm(s, OPC_IMUL_GvEvIz + rexw, args[0], args[0]);
1813 c896fe29 bellard
                tcg_out32(s, val);
1814 c896fe29 bellard
            }
1815 c896fe29 bellard
        } else {
1816 5d8a4f8f Richard Henderson
            tcg_out_modrm(s, OPC_IMUL_GvEv + rexw, args[0], args[2]);
1817 c896fe29 bellard
        }
1818 c896fe29 bellard
        break;
1819 5d8a4f8f Richard Henderson
1820 5d8a4f8f Richard Henderson
    OP_32_64(div2):
1821 5d8a4f8f Richard Henderson
        tcg_out_modrm(s, OPC_GRP3_Ev + rexw, EXT3_IDIV, args[4]);
1822 c896fe29 bellard
        break;
1823 5d8a4f8f Richard Henderson
    OP_32_64(divu2):
1824 5d8a4f8f Richard Henderson
        tcg_out_modrm(s, OPC_GRP3_Ev + rexw, EXT3_DIV, args[4]);
        break;

    OP_32_64(shl):
        c = SHIFT_SHL;
        goto gen_shift;
    OP_32_64(shr):
        c = SHIFT_SHR;
        goto gen_shift;
    OP_32_64(sar):
        c = SHIFT_SAR;
        goto gen_shift;
    OP_32_64(rotl):
        c = SHIFT_ROL;
        goto gen_shift;
    OP_32_64(rotr):
        c = SHIFT_ROR;
        goto gen_shift;
    gen_shift:
        if (const_args[2]) {
            tcg_out_shifti(s, c + rexw, args[0], args[2]);
        } else {
            tcg_out_modrm(s, OPC_SHIFT_cl + rexw, c, args[0]);
        }
        break;

    case INDEX_op_brcond_i32:
        tcg_out_brcond32(s, args[2], args[0], args[1], const_args[1],
                         args[3], 0);
        break;
    case INDEX_op_setcond_i32:
        tcg_out_setcond32(s, args[3], args[0], args[1],
                          args[2], const_args[2]);
        break;
    case INDEX_op_movcond_i32:
        tcg_out_movcond32(s, args[5], args[0], args[1],
                          args[2], const_args[2], args[3]);
        break;

    OP_32_64(bswap16):
        tcg_out_rolw_8(s, args[0]);
        break;
    OP_32_64(bswap32):
        tcg_out_bswap32(s, args[0]);
        break;

    OP_32_64(neg):
        tcg_out_modrm(s, OPC_GRP3_Ev + rexw, EXT3_NEG, args[0]);
        break;
    OP_32_64(not):
        tcg_out_modrm(s, OPC_GRP3_Ev + rexw, EXT3_NOT, args[0]);
        break;

    OP_32_64(ext8s):
        tcg_out_ext8s(s, args[0], args[1], rexw);
        break;
    OP_32_64(ext16s):
        tcg_out_ext16s(s, args[0], args[1], rexw);
        break;
    OP_32_64(ext8u):
        tcg_out_ext8u(s, args[0], args[1]);
        break;
    OP_32_64(ext16u):
        tcg_out_ext16u(s, args[0], args[1]);
        break;

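    /* For the qemu_ld/st cases below, the constant passed as the final
       argument encodes the access: bits 0..1 hold log2 of the access size
       (0 = 8-bit ... 3 = 64-bit) and the "| 4" bit requests sign extension
       of the loaded value.  */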
    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
        break;
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
        break;
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
        break;
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
        break;
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_qemu_ld32u:
#endif
    case INDEX_op_qemu_ld32:
        tcg_out_qemu_ld(s, args, 2);
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);
        break;

    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
        break;
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
        break;
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
        break;
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);
        break;

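    /* mulu2 uses the one-operand MUL instruction, which multiplies EAX by
       the given operand and leaves the 64-bit product in EDX:EAX.  add2 and
       sub2 implement double-word arithmetic: ADD/SUB on the low half, then
       ADC/SBB to propagate the carry or borrow into the high half.  */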
    case INDEX_op_mulu2_i32:
        tcg_out_modrm(s, OPC_GRP3_Ev, EXT3_MUL, args[3]);
        break;
    case INDEX_op_add2_i32:
        if (const_args[4]) {
            tgen_arithi(s, ARITH_ADD, args[0], args[4], 1);
        } else {
            tgen_arithr(s, ARITH_ADD, args[0], args[4]);
        }
        if (const_args[5]) {
            tgen_arithi(s, ARITH_ADC, args[1], args[5], 1);
        } else {
            tgen_arithr(s, ARITH_ADC, args[1], args[5]);
        }
        break;
    case INDEX_op_sub2_i32:
        if (const_args[4]) {
            tgen_arithi(s, ARITH_SUB, args[0], args[4], 1);
        } else {
            tgen_arithr(s, ARITH_SUB, args[0], args[4]);
        }
        if (const_args[5]) {
            tgen_arithi(s, ARITH_SBB, args[1], args[5], 1);
        } else {
            tgen_arithr(s, ARITH_SBB, args[1], args[5]);
        }
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        tcg_out_brcond2(s, args, const_args, 0);
        break;
    case INDEX_op_setcond2_i32:
        tcg_out_setcond2(s, args, const_args);
        break;
#else /* TCG_TARGET_REG_BITS == 64 */
    case INDEX_op_movi_i64:
        tcg_out_movi(s, TCG_TYPE_I64, args[0], args[1]);
        break;
    case INDEX_op_ld32s_i64:
        tcg_out_modrm_offset(s, OPC_MOVSLQ, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld_i64:
        tcg_out_ld(s, TCG_TYPE_I64, args[0], args[1], args[2]);
        break;
    case INDEX_op_st_i64:
        if (const_args[0]) {
            tcg_out_modrm_offset(s, OPC_MOVL_EvIz | P_REXW,
                                 0, args[1], args[2]);
            tcg_out32(s, args[0]);
        } else {
            tcg_out_st(s, TCG_TYPE_I64, args[0], args[1], args[2]);
        }
        break;
    case INDEX_op_qemu_ld32s:
        tcg_out_qemu_ld(s, args, 2 | 4);
        break;

    case INDEX_op_brcond_i64:
        tcg_out_brcond64(s, args[2], args[0], args[1], const_args[1],
                         args[3], 0);
        break;
    case INDEX_op_setcond_i64:
        tcg_out_setcond64(s, args[3], args[0], args[1],
                          args[2], const_args[2]);
        break;
    case INDEX_op_movcond_i64:
        tcg_out_movcond64(s, args[5], args[0], args[1],
                          args[2], const_args[2], args[3]);
        break;

    case INDEX_op_bswap64_i64:
        tcg_out_bswap64(s, args[0]);
        break;
    case INDEX_op_ext32u_i64:
        tcg_out_ext32u(s, args[0], args[1]);
        break;
    case INDEX_op_ext32s_i64:
        tcg_out_ext32s(s, args[0], args[1]);
        break;
#endif

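    /* Only deposits that line up with an x86 partial register are handled:
       bits 0..7 (a plain byte store), bits 8..15 (the high-byte registers,
       selected by the "args[0] + 4" register encoding), and bits 0..15
       (a 16-bit store via the operand-size prefix).  */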
    OP_32_64(deposit):
        if (args[3] == 0 && args[4] == 8) {
            /* load bits 0..7 */
            tcg_out_modrm(s, OPC_MOVB_EvGv | P_REXB_R | P_REXB_RM,
                          args[2], args[0]);
        } else if (args[3] == 8 && args[4] == 8) {
            /* load bits 8..15 */
            tcg_out_modrm(s, OPC_MOVB_EvGv, args[2], args[0] + 4);
        } else if (args[3] == 0 && args[4] == 16) {
            /* load bits 0..15 */
            tcg_out_modrm(s, OPC_MOVL_EvGv | P_DATA16, args[2], args[0]);
        } else {
            tcg_abort();
        }
        break;

    default:
        tcg_abort();
    }

#undef OP_32_64
}

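/* Operand constraints for each opcode, outputs first.  The letters are
   decoded by target_parse_constraint earlier in this file; roughly:
   "r" is any register, "q" a register with a directly addressable low
   byte, "a"/"c"/"d" the fixed EAX/ECX/EDX, "0"/"1" an operand that must
   match that output, "i" any immediate, "e" a sign-extended 32-bit
   immediate, and "L" a register that does not clash with the registers
   needed by the qemu_ld/st slow path.  */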
static const TCGTargetOpDef x86_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "ri" } },
    { INDEX_op_br, { } },
    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "qi", "r" } },
    { INDEX_op_st16_i32, { "ri", "r" } },
    { INDEX_op_st_i32, { "ri", "r" } },

    { INDEX_op_add_i32, { "r", "r", "ri" } },
    { INDEX_op_sub_i32, { "r", "0", "ri" } },
    { INDEX_op_mul_i32, { "r", "0", "ri" } },
    { INDEX_op_div2_i32, { "a", "d", "0", "1", "r" } },
    { INDEX_op_divu2_i32, { "a", "d", "0", "1", "r" } },
    { INDEX_op_and_i32, { "r", "0", "ri" } },
    { INDEX_op_or_i32, { "r", "0", "ri" } },
    { INDEX_op_xor_i32, { "r", "0", "ri" } },

    { INDEX_op_shl_i32, { "r", "0", "ci" } },
    { INDEX_op_shr_i32, { "r", "0", "ci" } },
    { INDEX_op_sar_i32, { "r", "0", "ci" } },
    { INDEX_op_rotl_i32, { "r", "0", "ci" } },
    { INDEX_op_rotr_i32, { "r", "0", "ci" } },

    { INDEX_op_brcond_i32, { "r", "ri" } },

    { INDEX_op_bswap16_i32, { "r", "0" } },
    { INDEX_op_bswap32_i32, { "r", "0" } },

    { INDEX_op_neg_i32, { "r", "0" } },

    { INDEX_op_not_i32, { "r", "0" } },

    { INDEX_op_ext8s_i32, { "r", "q" } },
    { INDEX_op_ext16s_i32, { "r", "r" } },
    { INDEX_op_ext8u_i32, { "r", "q" } },
    { INDEX_op_ext16u_i32, { "r", "r" } },

    { INDEX_op_setcond_i32, { "q", "r", "ri" } },

    { INDEX_op_deposit_i32, { "Q", "0", "Q" } },
#if TCG_TARGET_HAS_movcond_i32
    { INDEX_op_movcond_i32, { "r", "r", "ri", "r", "0" } },
#endif

    { INDEX_op_mulu2_i32, { "a", "d", "a", "r" } },
    { INDEX_op_add2_i32, { "r", "r", "0", "1", "ri", "ri" } },
    { INDEX_op_sub2_i32, { "r", "r", "0", "1", "ri", "ri" } },

#if TCG_TARGET_REG_BITS == 32
    { INDEX_op_brcond2_i32, { "r", "r", "ri", "ri" } },
    { INDEX_op_setcond2_i32, { "r", "r", "r", "ri", "ri" } },
#else
    { INDEX_op_mov_i64, { "r", "r" } },
    { INDEX_op_movi_i64, { "r" } },
    { INDEX_op_ld8u_i64, { "r", "r" } },
    { INDEX_op_ld8s_i64, { "r", "r" } },
    { INDEX_op_ld16u_i64, { "r", "r" } },
    { INDEX_op_ld16s_i64, { "r", "r" } },
    { INDEX_op_ld32u_i64, { "r", "r" } },
    { INDEX_op_ld32s_i64, { "r", "r" } },
    { INDEX_op_ld_i64, { "r", "r" } },
    { INDEX_op_st8_i64, { "ri", "r" } },
    { INDEX_op_st16_i64, { "ri", "r" } },
    { INDEX_op_st32_i64, { "ri", "r" } },
    { INDEX_op_st_i64, { "re", "r" } },

    { INDEX_op_add_i64, { "r", "r", "re" } },
    { INDEX_op_mul_i64, { "r", "0", "re" } },
    { INDEX_op_div2_i64, { "a", "d", "0", "1", "r" } },
    { INDEX_op_divu2_i64, { "a", "d", "0", "1", "r" } },
    { INDEX_op_sub_i64, { "r", "0", "re" } },
    { INDEX_op_and_i64, { "r", "0", "reZ" } },
    { INDEX_op_or_i64, { "r", "0", "re" } },
    { INDEX_op_xor_i64, { "r", "0", "re" } },

    { INDEX_op_shl_i64, { "r", "0", "ci" } },
    { INDEX_op_shr_i64, { "r", "0", "ci" } },
    { INDEX_op_sar_i64, { "r", "0", "ci" } },
    { INDEX_op_rotl_i64, { "r", "0", "ci" } },
    { INDEX_op_rotr_i64, { "r", "0", "ci" } },

    { INDEX_op_brcond_i64, { "r", "re" } },
    { INDEX_op_setcond_i64, { "r", "r", "re" } },

    { INDEX_op_bswap16_i64, { "r", "0" } },
    { INDEX_op_bswap32_i64, { "r", "0" } },
    { INDEX_op_bswap64_i64, { "r", "0" } },
    { INDEX_op_neg_i64, { "r", "0" } },
    { INDEX_op_not_i64, { "r", "0" } },

    { INDEX_op_ext8s_i64, { "r", "r" } },
    { INDEX_op_ext16s_i64, { "r", "r" } },
    { INDEX_op_ext32s_i64, { "r", "r" } },
    { INDEX_op_ext8u_i64, { "r", "r" } },
    { INDEX_op_ext16u_i64, { "r", "r" } },
    { INDEX_op_ext32u_i64, { "r", "r" } },

    { INDEX_op_deposit_i64, { "Q", "0", "Q" } },
    { INDEX_op_movcond_i64, { "r", "r", "re", "r", "0" } },
#endif

#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32, { "r", "L" } },
    { INDEX_op_qemu_ld32u, { "r", "L" } },
    { INDEX_op_qemu_ld32s, { "r", "L" } },
    { INDEX_op_qemu_ld64, { "r", "L" } },

    { INDEX_op_qemu_st8, { "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L" } },
#elif TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32, { "r", "L" } },
    { INDEX_op_qemu_ld64, { "r", "r", "L" } },

    { INDEX_op_qemu_st8, { "cb", "L" } },
    { INDEX_op_qemu_st16, { "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L", "L" } },
#else
    { INDEX_op_qemu_ld8u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld32, { "r", "L", "L" } },
    { INDEX_op_qemu_ld64, { "r", "r", "L", "L" } },

    { INDEX_op_qemu_st8, { "cb", "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L", "L", "L" } },
#endif
    { -1 },
};

static int tcg_target_callee_save_regs[] = {
#if TCG_TARGET_REG_BITS == 64
    TCG_REG_RBP,
    TCG_REG_RBX,
#if defined(_WIN64)
    TCG_REG_RDI,
    TCG_REG_RSI,
#endif
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14, /* Currently used for the global env. */
    TCG_REG_R15,
#else
    TCG_REG_EBP, /* Currently used for the global env. */
    TCG_REG_EBX,
    TCG_REG_ESI,
    TCG_REG_EDI,
#endif
};

/* Compute frame size via macros, to share between tcg_target_qemu_prologue
   and tcg_register_jit.  */

#define PUSH_SIZE \
    ((1 + ARRAY_SIZE(tcg_target_callee_save_regs)) \
     * (TCG_TARGET_REG_BITS / 8))

#define FRAME_SIZE \
    ((PUSH_SIZE \
      + TCG_STATIC_CALL_ARGS_SIZE \
      + CPU_TEMP_BUF_NLONGS * sizeof(long) \
      + TCG_TARGET_STACK_ALIGN - 1) \
     & ~(TCG_TARGET_STACK_ALIGN - 1))
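
/* PUSH_SIZE counts the return address plus every pushed callee-saved
   register; FRAME_SIZE additionally reserves the static call-argument
   area and the TCG temp buffer, rounded up to TCG_TARGET_STACK_ALIGN.  */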

/* Generate global QEMU prologue and epilogue code */
static void tcg_target_qemu_prologue(TCGContext *s)
{
    int i, stack_addend;

    /* TB prologue */

    /* Reserve some stack space, also for TCG temps.  */
    stack_addend = FRAME_SIZE - PUSH_SIZE;
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  CPU_TEMP_BUF_NLONGS * sizeof(long));

    /* Save all callee saved registers.  */
    for (i = 0; i < ARRAY_SIZE(tcg_target_callee_save_regs); i++) {
        tcg_out_push(s, tcg_target_callee_save_regs[i]);
    }

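    /* Load the env pointer and jump to the TB.  On a 32-bit host both are
       passed on the stack, just above the saved registers and the return
       address; on a 64-bit host they arrive in the first two integer
       argument registers.  */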
#if TCG_TARGET_REG_BITS == 32
    tcg_out_ld(s, TCG_TYPE_PTR, TCG_AREG0, TCG_REG_ESP,
               (ARRAY_SIZE(tcg_target_callee_save_regs) + 1) * 4);
    tcg_out_addi(s, TCG_REG_ESP, -stack_addend);
    /* jmp *tb.  */
    tcg_out_modrm_offset(s, OPC_GRP5, EXT5_JMPN_Ev, TCG_REG_ESP,
                         (ARRAY_SIZE(tcg_target_callee_save_regs) + 2) * 4
                         + stack_addend);
#else
    tcg_out_mov(s, TCG_TYPE_PTR, TCG_AREG0, tcg_target_call_iarg_regs[0]);
    tcg_out_addi(s, TCG_REG_ESP, -stack_addend);
    /* jmp *tb.  */
    tcg_out_modrm(s, OPC_GRP5, EXT5_JMPN_Ev, tcg_target_call_iarg_regs[1]);
#endif

    /* TB epilogue */
    tb_ret_addr = s->code_ptr;

    tcg_out_addi(s, TCG_REG_CALL_STACK, stack_addend);

    for (i = ARRAY_SIZE(tcg_target_callee_save_regs) - 1; i >= 0; i--) {
        tcg_out_pop(s, tcg_target_callee_save_regs[i]);
    }
    tcg_out_opc(s, OPC_RET, 0, 0, 0);

#if !defined(CONFIG_SOFTMMU)
    /* Try to set up a segment register to point to GUEST_BASE.  */
    if (GUEST_BASE) {
        setup_guest_base_seg();
    }
#endif
}

static void tcg_target_init(TCGContext *s)
{
    /* For 32-bit, 99% certainty that we're running on hardware that supports
       cmov, but we still need to check.  In case cmov is not available, we'll
       use a small forward branch.  */
#ifndef have_cmov
    {
        unsigned a, b, c, d;
        have_cmov = (__get_cpuid(1, &a, &b, &c, &d) && (d & bit_CMOV));
    }
#endif

#if !defined(CONFIG_USER_ONLY)
    /* fail safe */
    if ((1 << CPU_TLB_ENTRY_BITS) != sizeof(CPUTLBEntry))
        tcg_abort();
#endif

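    /* 16 general registers are usable on a 64-bit host, 8 on a 32-bit host;
       these masks expose them to the register allocator.  */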
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffff);
        tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0, 0xffff);
    } else {
        tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xff);
    }

    tcg_regset_clear(tcg_target_call_clobber_regs);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_EAX);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_EDX);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_ECX);
    if (TCG_TARGET_REG_BITS == 64) {
#if !defined(_WIN64)
        tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_RDI);
        tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_RSI);
#endif
        tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R8);
        tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R9);
        tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R10);
        tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R11);
    }

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    tcg_add_target_add_op_defs(x86_op_defs);
}

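/* The structures below mirror the DWARF .debug_frame CIE and FDE layout;
   a filled-in copy is handed to tcg_register_jit_int() below so that a
   debugger can unwind through the generated code.  */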
typedef struct {
    uint32_t len __attribute__((aligned((sizeof(void *)))));
    uint32_t id;
    uint8_t version;
    char augmentation[1];
    uint8_t code_align;
    uint8_t data_align;
    uint8_t return_column;
} DebugFrameCIE;

typedef struct {
    uint32_t len __attribute__((aligned((sizeof(void *)))));
    uint32_t cie_offset;
    tcg_target_long func_start __attribute__((packed));
    tcg_target_long func_len __attribute__((packed));
    uint8_t def_cfa[4];
    uint8_t reg_ofs[14];
} DebugFrameFDE;

typedef struct {
    DebugFrameCIE cie;
    DebugFrameFDE fde;
} DebugFrame;

#if !defined(__ELF__)
    /* Host machine without ELF. */
#elif TCG_TARGET_REG_BITS == 64
#define ELF_HOST_MACHINE EM_X86_64
static DebugFrame debug_frame = {
    .cie.len = sizeof(DebugFrameCIE)-4, /* length after .len member */
    .cie.id = -1,
    .cie.version = 1,
    .cie.code_align = 1,
    .cie.data_align = 0x78,             /* sleb128 -8 */
    .cie.return_column = 16,

    .fde.len = sizeof(DebugFrameFDE)-4, /* length after .len member */
    .fde.def_cfa = {
        12, 7,                          /* DW_CFA_def_cfa %rsp, ... */
        (FRAME_SIZE & 0x7f) | 0x80,     /* ... uleb128 FRAME_SIZE */
        (FRAME_SIZE >> 7)
    },
    .fde.reg_ofs = {
        0x90, 1,                        /* DW_CFA_offset, %rip, -8 */
        /* The following ordering must match tcg_target_callee_save_regs.  */
        0x86, 2,                        /* DW_CFA_offset, %rbp, -16 */
        0x83, 3,                        /* DW_CFA_offset, %rbx, -24 */
        0x8c, 4,                        /* DW_CFA_offset, %r12, -32 */
        0x8d, 5,                        /* DW_CFA_offset, %r13, -40 */
        0x8e, 6,                        /* DW_CFA_offset, %r14, -48 */
        0x8f, 7,                        /* DW_CFA_offset, %r15, -56 */
    }
};
#else
#define ELF_HOST_MACHINE EM_386
static DebugFrame debug_frame = {
    .cie.len = sizeof(DebugFrameCIE)-4, /* length after .len member */
    .cie.id = -1,
    .cie.version = 1,
    .cie.code_align = 1,
    .cie.data_align = 0x7c,             /* sleb128 -4 */
    .cie.return_column = 8,

    .fde.len = sizeof(DebugFrameFDE)-4, /* length after .len member */
    .fde.def_cfa = {
        12, 4,                          /* DW_CFA_def_cfa %esp, ... */
        (FRAME_SIZE & 0x7f) | 0x80,     /* ... uleb128 FRAME_SIZE */
        (FRAME_SIZE >> 7)
    },
    .fde.reg_ofs = {
        0x88, 1,                        /* DW_CFA_offset, %eip, -4 */
        /* The following ordering must match tcg_target_callee_save_regs.  */
        0x85, 2,                        /* DW_CFA_offset, %ebp, -8 */
        0x83, 3,                        /* DW_CFA_offset, %ebx, -12 */
        0x86, 4,                        /* DW_CFA_offset, %esi, -16 */
        0x87, 5,                        /* DW_CFA_offset, %edi, -20 */
    }
};
#endif

#if defined(ELF_HOST_MACHINE)
void tcg_register_jit(void *buf, size_t buf_size)
{
    /* We're expecting a 2 byte uleb128 encoded value.  */
    assert(FRAME_SIZE >> 14 == 0);

    debug_frame.fde.func_start = (tcg_target_long) buf;
    debug_frame.fde.func_len = buf_size;

    tcg_register_jit_int(buf, buf_size, &debug_frame, sizeof(debug_frame));
}
#endif