root / tci.c @ b7fcff01
History | View | Annotate | Download (35.6 kB)
1 | 7657f4bf | Stefan Weil | /*
|
---|---|---|---|
2 | 7657f4bf | Stefan Weil | * Tiny Code Interpreter for QEMU
|
3 | 7657f4bf | Stefan Weil | *
|
4 | 7657f4bf | Stefan Weil | * Copyright (c) 2009, 2011 Stefan Weil
|
5 | 7657f4bf | Stefan Weil | *
|
6 | 7657f4bf | Stefan Weil | * This program is free software: you can redistribute it and/or modify
|
7 | 7657f4bf | Stefan Weil | * it under the terms of the GNU General Public License as published by
|
8 | 7657f4bf | Stefan Weil | * the Free Software Foundation, either version 2 of the License, or
|
9 | 7657f4bf | Stefan Weil | * (at your option) any later version.
|
10 | 7657f4bf | Stefan Weil | *
|
11 | 7657f4bf | Stefan Weil | * This program is distributed in the hope that it will be useful,
|
12 | 7657f4bf | Stefan Weil | * but WITHOUT ANY WARRANTY; without even the implied warranty of
|
13 | 7657f4bf | Stefan Weil | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
14 | 7657f4bf | Stefan Weil | * GNU General Public License for more details.
|
15 | 7657f4bf | Stefan Weil | *
|
16 | 7657f4bf | Stefan Weil | * You should have received a copy of the GNU General Public License
|
17 | 7657f4bf | Stefan Weil | * along with this program. If not, see <http://www.gnu.org/licenses/>.
|
18 | 7657f4bf | Stefan Weil | */
|
19 | 7657f4bf | Stefan Weil | |
#include "config.h"

/* Defining NDEBUG disables assertions (which makes the code faster). */
#if !defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
# define NDEBUG
#endif

#include <string.h>          /* memcpy */

#include "qemu-common.h"
#include "exec/exec-all.h"   /* MAX_OPC_PARAM_IARGS */
#include "tcg-op.h"
30 | 7657f4bf | Stefan Weil | |
/* Marker for not-yet-implemented opcodes: report the location and abort. */
#define TODO()                                      \
    do {                                            \
        fprintf(stderr, "TODO %s:%u: %s()\n",       \
                __FILE__, __LINE__, __func__);      \
        tcg_abort();                                \
    } while (0)
38 | 7657f4bf | Stefan Weil | |
39 | 6673f47d | Stefan Weil | #if MAX_OPC_PARAM_IARGS != 5 |
40 | 7657f4bf | Stefan Weil | # error Fix needed, number of supported input arguments changed!
|
41 | 7657f4bf | Stefan Weil | #endif
|
42 | 7657f4bf | Stefan Weil | #if TCG_TARGET_REG_BITS == 32 |
43 | 7657f4bf | Stefan Weil | typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
|
44 | 7657f4bf | Stefan Weil | tcg_target_ulong, tcg_target_ulong, |
45 | 7657f4bf | Stefan Weil | tcg_target_ulong, tcg_target_ulong, |
46 | 6673f47d | Stefan Weil | tcg_target_ulong, tcg_target_ulong, |
47 | 7657f4bf | Stefan Weil | tcg_target_ulong, tcg_target_ulong); |
48 | 7657f4bf | Stefan Weil | #else
|
49 | 7657f4bf | Stefan Weil | typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
|
50 | 6673f47d | Stefan Weil | tcg_target_ulong, tcg_target_ulong, |
51 | 6673f47d | Stefan Weil | tcg_target_ulong); |
52 | 7657f4bf | Stefan Weil | #endif
|
53 | 7657f4bf | Stefan Weil | |
54 | 7657f4bf | Stefan Weil | /* Targets which don't use GETPC also don't need tci_tb_ptr
|
55 | 7657f4bf | Stefan Weil | which makes them a little faster. */
|
56 | 7657f4bf | Stefan Weil | #if defined(GETPC)
|
57 | c3ca0467 | Stefan Weil | uintptr_t tci_tb_ptr; |
58 | 7657f4bf | Stefan Weil | #endif
|
59 | 7657f4bf | Stefan Weil | |
60 | 7657f4bf | Stefan Weil | static tcg_target_ulong tci_reg[TCG_TARGET_NB_REGS];
|
61 | 7657f4bf | Stefan Weil | |
62 | 771142c2 | Richard Henderson | static tcg_target_ulong tci_read_reg(TCGReg index)
|
63 | 7657f4bf | Stefan Weil | { |
64 | 7657f4bf | Stefan Weil | assert(index < ARRAY_SIZE(tci_reg)); |
65 | 7657f4bf | Stefan Weil | return tci_reg[index];
|
66 | 7657f4bf | Stefan Weil | } |
67 | 7657f4bf | Stefan Weil | |
68 | 7657f4bf | Stefan Weil | #if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
|
69 | 771142c2 | Richard Henderson | static int8_t tci_read_reg8s(TCGReg index)
|
70 | 7657f4bf | Stefan Weil | { |
71 | 7657f4bf | Stefan Weil | return (int8_t)tci_read_reg(index);
|
72 | 7657f4bf | Stefan Weil | } |
73 | 7657f4bf | Stefan Weil | #endif
|
74 | 7657f4bf | Stefan Weil | |
75 | 7657f4bf | Stefan Weil | #if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
|
76 | 771142c2 | Richard Henderson | static int16_t tci_read_reg16s(TCGReg index)
|
77 | 7657f4bf | Stefan Weil | { |
78 | 7657f4bf | Stefan Weil | return (int16_t)tci_read_reg(index);
|
79 | 7657f4bf | Stefan Weil | } |
80 | 7657f4bf | Stefan Weil | #endif
|
81 | 7657f4bf | Stefan Weil | |
82 | 7657f4bf | Stefan Weil | #if TCG_TARGET_REG_BITS == 64 |
83 | 771142c2 | Richard Henderson | static int32_t tci_read_reg32s(TCGReg index)
|
84 | 7657f4bf | Stefan Weil | { |
85 | 7657f4bf | Stefan Weil | return (int32_t)tci_read_reg(index);
|
86 | 7657f4bf | Stefan Weil | } |
87 | 7657f4bf | Stefan Weil | #endif
|
88 | 7657f4bf | Stefan Weil | |
89 | 771142c2 | Richard Henderson | static uint8_t tci_read_reg8(TCGReg index)
|
90 | 7657f4bf | Stefan Weil | { |
91 | 7657f4bf | Stefan Weil | return (uint8_t)tci_read_reg(index);
|
92 | 7657f4bf | Stefan Weil | } |
93 | 7657f4bf | Stefan Weil | |
94 | 771142c2 | Richard Henderson | static uint16_t tci_read_reg16(TCGReg index)
|
95 | 7657f4bf | Stefan Weil | { |
96 | 7657f4bf | Stefan Weil | return (uint16_t)tci_read_reg(index);
|
97 | 7657f4bf | Stefan Weil | } |
98 | 7657f4bf | Stefan Weil | |
99 | 771142c2 | Richard Henderson | static uint32_t tci_read_reg32(TCGReg index)
|
100 | 7657f4bf | Stefan Weil | { |
101 | 7657f4bf | Stefan Weil | return (uint32_t)tci_read_reg(index);
|
102 | 7657f4bf | Stefan Weil | } |
103 | 7657f4bf | Stefan Weil | |
104 | 7657f4bf | Stefan Weil | #if TCG_TARGET_REG_BITS == 64 |
105 | 771142c2 | Richard Henderson | static uint64_t tci_read_reg64(TCGReg index)
|
106 | 7657f4bf | Stefan Weil | { |
107 | 7657f4bf | Stefan Weil | return tci_read_reg(index);
|
108 | 7657f4bf | Stefan Weil | } |
109 | 7657f4bf | Stefan Weil | #endif
|
110 | 7657f4bf | Stefan Weil | |
111 | 771142c2 | Richard Henderson | static void tci_write_reg(TCGReg index, tcg_target_ulong value) |
112 | 7657f4bf | Stefan Weil | { |
113 | 7657f4bf | Stefan Weil | assert(index < ARRAY_SIZE(tci_reg)); |
114 | 7657f4bf | Stefan Weil | assert(index != TCG_AREG0); |
115 | ee79c356 | Richard Henderson | assert(index != TCG_REG_CALL_STACK); |
116 | 7657f4bf | Stefan Weil | tci_reg[index] = value; |
117 | 7657f4bf | Stefan Weil | } |
118 | 7657f4bf | Stefan Weil | |
119 | 771142c2 | Richard Henderson | static void tci_write_reg8s(TCGReg index, int8_t value) |
120 | 7657f4bf | Stefan Weil | { |
121 | 7657f4bf | Stefan Weil | tci_write_reg(index, value); |
122 | 7657f4bf | Stefan Weil | } |
123 | 7657f4bf | Stefan Weil | |
124 | 771142c2 | Richard Henderson | static void tci_write_reg16s(TCGReg index, int16_t value) |
125 | 7657f4bf | Stefan Weil | { |
126 | 7657f4bf | Stefan Weil | tci_write_reg(index, value); |
127 | 7657f4bf | Stefan Weil | } |
128 | 7657f4bf | Stefan Weil | |
129 | 7657f4bf | Stefan Weil | #if TCG_TARGET_REG_BITS == 64 |
130 | 771142c2 | Richard Henderson | static void tci_write_reg32s(TCGReg index, int32_t value) |
131 | 7657f4bf | Stefan Weil | { |
132 | 7657f4bf | Stefan Weil | tci_write_reg(index, value); |
133 | 7657f4bf | Stefan Weil | } |
134 | 7657f4bf | Stefan Weil | #endif
|
135 | 7657f4bf | Stefan Weil | |
136 | 771142c2 | Richard Henderson | static void tci_write_reg8(TCGReg index, uint8_t value) |
137 | 7657f4bf | Stefan Weil | { |
138 | 7657f4bf | Stefan Weil | tci_write_reg(index, value); |
139 | 7657f4bf | Stefan Weil | } |
140 | 7657f4bf | Stefan Weil | |
141 | 771142c2 | Richard Henderson | static void tci_write_reg16(TCGReg index, uint16_t value) |
142 | 7657f4bf | Stefan Weil | { |
143 | 7657f4bf | Stefan Weil | tci_write_reg(index, value); |
144 | 7657f4bf | Stefan Weil | } |
145 | 7657f4bf | Stefan Weil | |
146 | 771142c2 | Richard Henderson | static void tci_write_reg32(TCGReg index, uint32_t value) |
147 | 7657f4bf | Stefan Weil | { |
148 | 7657f4bf | Stefan Weil | tci_write_reg(index, value); |
149 | 7657f4bf | Stefan Weil | } |
150 | 7657f4bf | Stefan Weil | |
151 | 7657f4bf | Stefan Weil | #if TCG_TARGET_REG_BITS == 32 |
152 | 7657f4bf | Stefan Weil | static void tci_write_reg64(uint32_t high_index, uint32_t low_index, |
153 | 7657f4bf | Stefan Weil | uint64_t value) |
154 | 7657f4bf | Stefan Weil | { |
155 | 7657f4bf | Stefan Weil | tci_write_reg(low_index, value); |
156 | 7657f4bf | Stefan Weil | tci_write_reg(high_index, value >> 32);
|
157 | 7657f4bf | Stefan Weil | } |
158 | 7657f4bf | Stefan Weil | #elif TCG_TARGET_REG_BITS == 64 |
159 | 771142c2 | Richard Henderson | static void tci_write_reg64(TCGReg index, uint64_t value) |
160 | 7657f4bf | Stefan Weil | { |
161 | 7657f4bf | Stefan Weil | tci_write_reg(index, value); |
162 | 7657f4bf | Stefan Weil | } |
163 | 7657f4bf | Stefan Weil | #endif
|
164 | 7657f4bf | Stefan Weil | |
165 | 7657f4bf | Stefan Weil | #if TCG_TARGET_REG_BITS == 32 |
/* Combine two 32 bit halves into a 64 bit value. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) | low;
}
171 | 7657f4bf | Stefan Weil | #endif
|
172 | 7657f4bf | Stefan Weil | |
173 | 7657f4bf | Stefan Weil | /* Read constant (native size) from bytecode. */
|
174 | 7657f4bf | Stefan Weil | static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
|
175 | 7657f4bf | Stefan Weil | { |
176 | 7657f4bf | Stefan Weil | tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr); |
177 | 7657f4bf | Stefan Weil | *tb_ptr += sizeof(value);
|
178 | 7657f4bf | Stefan Weil | return value;
|
179 | 7657f4bf | Stefan Weil | } |
180 | 7657f4bf | Stefan Weil | |
/* Read unsigned constant (32 bit) from bytecode and advance the pointer.

   memcpy is used instead of *(uint32_t *)(*tb_ptr): the bytecode stream is
   a byte buffer with no alignment guarantee, and a type-punned load would
   be undefined behavior (strict aliasing, misalignment). */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value;

    memcpy(&value, *tb_ptr, sizeof(value));
    *tb_ptr += sizeof(value);
    return value;
}
188 | 7657f4bf | Stefan Weil | |
/* Read signed constant (32 bit) from bytecode and advance the pointer.

   memcpy avoids the undefined behavior of a misaligned, type-punned
   *(int32_t *) load from the unaligned bytecode stream. */
static int32_t tci_read_s32(uint8_t **tb_ptr)
{
    int32_t value;

    memcpy(&value, *tb_ptr, sizeof(value));
    *tb_ptr += sizeof(value);
    return value;
}
196 | 03fc0548 | Richard Henderson | |
197 | 7657f4bf | Stefan Weil | #if TCG_TARGET_REG_BITS == 64 |
/* Read constant (64 bit) from bytecode and advance the pointer.

   memcpy avoids the undefined behavior of a misaligned, type-punned
   *(uint64_t *) load from the unaligned bytecode stream. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value;

    memcpy(&value, *tb_ptr, sizeof(value));
    *tb_ptr += sizeof(value);
    return value;
}
205 | 7657f4bf | Stefan Weil | #endif
|
206 | 7657f4bf | Stefan Weil | |
207 | 7657f4bf | Stefan Weil | /* Read indexed register (native size) from bytecode. */
|
208 | 7657f4bf | Stefan Weil | static tcg_target_ulong tci_read_r(uint8_t **tb_ptr)
|
209 | 7657f4bf | Stefan Weil | { |
210 | 7657f4bf | Stefan Weil | tcg_target_ulong value = tci_read_reg(**tb_ptr); |
211 | 7657f4bf | Stefan Weil | *tb_ptr += 1;
|
212 | 7657f4bf | Stefan Weil | return value;
|
213 | 7657f4bf | Stefan Weil | } |
214 | 7657f4bf | Stefan Weil | |
/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(**tb_ptr);

    (*tb_ptr)++;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(**tb_ptr);

    (*tb_ptr)++;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(**tb_ptr);

    (*tb_ptr)++;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(**tb_ptr);

    (*tb_ptr)++;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(**tb_ptr);

    (*tb_ptr)++;
    return value;
}
258 | 7657f4bf | Stefan Weil | |
#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode; the low half
   is encoded first. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint32_t lo = tci_read_r32(tb_ptr);
    uint32_t hi = tci_read_r32(tb_ptr);

    return tci_uint64(hi, lo);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(**tb_ptr);

    (*tb_ptr)++;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(**tb_ptr);

    (*tb_ptr)++;
    return value;
}
#endif
|
283 | 7657f4bf | Stefan Weil | |
284 | 7657f4bf | Stefan Weil | /* Read indexed register(s) with target address from bytecode. */
|
285 | 7657f4bf | Stefan Weil | static target_ulong tci_read_ulong(uint8_t **tb_ptr)
|
286 | 7657f4bf | Stefan Weil | { |
287 | 7657f4bf | Stefan Weil | target_ulong taddr = tci_read_r(tb_ptr); |
288 | 7657f4bf | Stefan Weil | #if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
|
289 | 7657f4bf | Stefan Weil | taddr += (uint64_t)tci_read_r(tb_ptr) << 32;
|
290 | 7657f4bf | Stefan Weil | #endif
|
291 | 7657f4bf | Stefan Weil | return taddr;
|
292 | 7657f4bf | Stefan Weil | } |
293 | 7657f4bf | Stefan Weil | |
294 | 7657f4bf | Stefan Weil | /* Read indexed register or constant (native size) from bytecode. */
|
295 | 7657f4bf | Stefan Weil | static tcg_target_ulong tci_read_ri(uint8_t **tb_ptr)
|
296 | 7657f4bf | Stefan Weil | { |
297 | 7657f4bf | Stefan Weil | tcg_target_ulong value; |
298 | 771142c2 | Richard Henderson | TCGReg r = **tb_ptr; |
299 | 7657f4bf | Stefan Weil | *tb_ptr += 1;
|
300 | 7657f4bf | Stefan Weil | if (r == TCG_CONST) {
|
301 | 7657f4bf | Stefan Weil | value = tci_read_i(tb_ptr); |
302 | 7657f4bf | Stefan Weil | } else {
|
303 | 7657f4bf | Stefan Weil | value = tci_read_reg(r); |
304 | 7657f4bf | Stefan Weil | } |
305 | 7657f4bf | Stefan Weil | return value;
|
306 | 7657f4bf | Stefan Weil | } |
307 | 7657f4bf | Stefan Weil | |
308 | 7657f4bf | Stefan Weil | /* Read indexed register or constant (32 bit) from bytecode. */
|
309 | 7657f4bf | Stefan Weil | static uint32_t tci_read_ri32(uint8_t **tb_ptr)
|
310 | 7657f4bf | Stefan Weil | { |
311 | 7657f4bf | Stefan Weil | uint32_t value; |
312 | 771142c2 | Richard Henderson | TCGReg r = **tb_ptr; |
313 | 7657f4bf | Stefan Weil | *tb_ptr += 1;
|
314 | 7657f4bf | Stefan Weil | if (r == TCG_CONST) {
|
315 | 7657f4bf | Stefan Weil | value = tci_read_i32(tb_ptr); |
316 | 7657f4bf | Stefan Weil | } else {
|
317 | 7657f4bf | Stefan Weil | value = tci_read_reg32(r); |
318 | 7657f4bf | Stefan Weil | } |
319 | 7657f4bf | Stefan Weil | return value;
|
320 | 7657f4bf | Stefan Weil | } |
321 | 7657f4bf | Stefan Weil | |
#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode;
   the low half is encoded first. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint32_t lo = tci_read_ri32(tb_ptr);
    uint32_t hi = tci_read_ri32(tb_ptr);

    return tci_uint64(hi, lo);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode.  The pseudo
   register index TCG_CONST means an immediate value follows. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    TCGReg r = *(*tb_ptr)++;

    if (r == TCG_CONST) {
        return tci_read_i64(tb_ptr);
    }
    return tci_read_reg64(r);
}
#endif
|
344 | 7657f4bf | Stefan Weil | |
345 | c6c5063c | Richard Henderson | static tcg_target_ulong tci_read_label(uint8_t **tb_ptr)
|
346 | 7657f4bf | Stefan Weil | { |
347 | c6c5063c | Richard Henderson | tcg_target_ulong label = tci_read_i(tb_ptr); |
348 | 7657f4bf | Stefan Weil | assert(label != 0);
|
349 | 7657f4bf | Stefan Weil | return label;
|
350 | 7657f4bf | Stefan Weil | } |
351 | 7657f4bf | Stefan Weil | |
352 | 7657f4bf | Stefan Weil | static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition) |
353 | 7657f4bf | Stefan Weil | { |
354 | 7657f4bf | Stefan Weil | bool result = false; |
355 | 7657f4bf | Stefan Weil | int32_t i0 = u0; |
356 | 7657f4bf | Stefan Weil | int32_t i1 = u1; |
357 | 7657f4bf | Stefan Weil | switch (condition) {
|
358 | 7657f4bf | Stefan Weil | case TCG_COND_EQ:
|
359 | 7657f4bf | Stefan Weil | result = (u0 == u1); |
360 | 7657f4bf | Stefan Weil | break;
|
361 | 7657f4bf | Stefan Weil | case TCG_COND_NE:
|
362 | 7657f4bf | Stefan Weil | result = (u0 != u1); |
363 | 7657f4bf | Stefan Weil | break;
|
364 | 7657f4bf | Stefan Weil | case TCG_COND_LT:
|
365 | 7657f4bf | Stefan Weil | result = (i0 < i1); |
366 | 7657f4bf | Stefan Weil | break;
|
367 | 7657f4bf | Stefan Weil | case TCG_COND_GE:
|
368 | 7657f4bf | Stefan Weil | result = (i0 >= i1); |
369 | 7657f4bf | Stefan Weil | break;
|
370 | 7657f4bf | Stefan Weil | case TCG_COND_LE:
|
371 | 7657f4bf | Stefan Weil | result = (i0 <= i1); |
372 | 7657f4bf | Stefan Weil | break;
|
373 | 7657f4bf | Stefan Weil | case TCG_COND_GT:
|
374 | 7657f4bf | Stefan Weil | result = (i0 > i1); |
375 | 7657f4bf | Stefan Weil | break;
|
376 | 7657f4bf | Stefan Weil | case TCG_COND_LTU:
|
377 | 7657f4bf | Stefan Weil | result = (u0 < u1); |
378 | 7657f4bf | Stefan Weil | break;
|
379 | 7657f4bf | Stefan Weil | case TCG_COND_GEU:
|
380 | 7657f4bf | Stefan Weil | result = (u0 >= u1); |
381 | 7657f4bf | Stefan Weil | break;
|
382 | 7657f4bf | Stefan Weil | case TCG_COND_LEU:
|
383 | 7657f4bf | Stefan Weil | result = (u0 <= u1); |
384 | 7657f4bf | Stefan Weil | break;
|
385 | 7657f4bf | Stefan Weil | case TCG_COND_GTU:
|
386 | 7657f4bf | Stefan Weil | result = (u0 > u1); |
387 | 7657f4bf | Stefan Weil | break;
|
388 | 7657f4bf | Stefan Weil | default:
|
389 | 7657f4bf | Stefan Weil | TODO(); |
390 | 7657f4bf | Stefan Weil | } |
391 | 7657f4bf | Stefan Weil | return result;
|
392 | 7657f4bf | Stefan Weil | } |
393 | 7657f4bf | Stefan Weil | |
394 | 7657f4bf | Stefan Weil | static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition) |
395 | 7657f4bf | Stefan Weil | { |
396 | 7657f4bf | Stefan Weil | bool result = false; |
397 | 7657f4bf | Stefan Weil | int64_t i0 = u0; |
398 | 7657f4bf | Stefan Weil | int64_t i1 = u1; |
399 | 7657f4bf | Stefan Weil | switch (condition) {
|
400 | 7657f4bf | Stefan Weil | case TCG_COND_EQ:
|
401 | 7657f4bf | Stefan Weil | result = (u0 == u1); |
402 | 7657f4bf | Stefan Weil | break;
|
403 | 7657f4bf | Stefan Weil | case TCG_COND_NE:
|
404 | 7657f4bf | Stefan Weil | result = (u0 != u1); |
405 | 7657f4bf | Stefan Weil | break;
|
406 | 7657f4bf | Stefan Weil | case TCG_COND_LT:
|
407 | 7657f4bf | Stefan Weil | result = (i0 < i1); |
408 | 7657f4bf | Stefan Weil | break;
|
409 | 7657f4bf | Stefan Weil | case TCG_COND_GE:
|
410 | 7657f4bf | Stefan Weil | result = (i0 >= i1); |
411 | 7657f4bf | Stefan Weil | break;
|
412 | 7657f4bf | Stefan Weil | case TCG_COND_LE:
|
413 | 7657f4bf | Stefan Weil | result = (i0 <= i1); |
414 | 7657f4bf | Stefan Weil | break;
|
415 | 7657f4bf | Stefan Weil | case TCG_COND_GT:
|
416 | 7657f4bf | Stefan Weil | result = (i0 > i1); |
417 | 7657f4bf | Stefan Weil | break;
|
418 | 7657f4bf | Stefan Weil | case TCG_COND_LTU:
|
419 | 7657f4bf | Stefan Weil | result = (u0 < u1); |
420 | 7657f4bf | Stefan Weil | break;
|
421 | 7657f4bf | Stefan Weil | case TCG_COND_GEU:
|
422 | 7657f4bf | Stefan Weil | result = (u0 >= u1); |
423 | 7657f4bf | Stefan Weil | break;
|
424 | 7657f4bf | Stefan Weil | case TCG_COND_LEU:
|
425 | 7657f4bf | Stefan Weil | result = (u0 <= u1); |
426 | 7657f4bf | Stefan Weil | break;
|
427 | 7657f4bf | Stefan Weil | case TCG_COND_GTU:
|
428 | 7657f4bf | Stefan Weil | result = (u0 > u1); |
429 | 7657f4bf | Stefan Weil | break;
|
430 | 7657f4bf | Stefan Weil | default:
|
431 | 7657f4bf | Stefan Weil | TODO(); |
432 | 7657f4bf | Stefan Weil | } |
433 | 7657f4bf | Stefan Weil | return result;
|
434 | 7657f4bf | Stefan Weil | } |
435 | 7657f4bf | Stefan Weil | |
436 | 7657f4bf | Stefan Weil | /* Interpret pseudo code in tb. */
|
437 | 04d5a1da | Richard Henderson | uintptr_t tcg_qemu_tb_exec(CPUArchState *env, uint8_t *tb_ptr) |
438 | 7657f4bf | Stefan Weil | { |
439 | ee79c356 | Richard Henderson | long tcg_temps[CPU_TEMP_BUF_NLONGS];
|
440 | ee79c356 | Richard Henderson | uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS); |
441 | 04d5a1da | Richard Henderson | uintptr_t next_tb = 0;
|
442 | 7657f4bf | Stefan Weil | |
443 | 7657f4bf | Stefan Weil | tci_reg[TCG_AREG0] = (tcg_target_ulong)env; |
444 | ee79c356 | Richard Henderson | tci_reg[TCG_REG_CALL_STACK] = sp_value; |
445 | 7657f4bf | Stefan Weil | assert(tb_ptr); |
446 | 7657f4bf | Stefan Weil | |
447 | 7657f4bf | Stefan Weil | for (;;) {
|
448 | 7657f4bf | Stefan Weil | TCGOpcode opc = tb_ptr[0];
|
449 | 7657f4bf | Stefan Weil | #if !defined(NDEBUG)
|
450 | 7657f4bf | Stefan Weil | uint8_t op_size = tb_ptr[1];
|
451 | 7657f4bf | Stefan Weil | uint8_t *old_code_ptr = tb_ptr; |
452 | 7657f4bf | Stefan Weil | #endif
|
453 | 7657f4bf | Stefan Weil | tcg_target_ulong t0; |
454 | 7657f4bf | Stefan Weil | tcg_target_ulong t1; |
455 | 7657f4bf | Stefan Weil | tcg_target_ulong t2; |
456 | 7657f4bf | Stefan Weil | tcg_target_ulong label; |
457 | 7657f4bf | Stefan Weil | TCGCond condition; |
458 | 7657f4bf | Stefan Weil | target_ulong taddr; |
459 | 7657f4bf | Stefan Weil | #ifndef CONFIG_SOFTMMU
|
460 | 7657f4bf | Stefan Weil | tcg_target_ulong host_addr; |
461 | 7657f4bf | Stefan Weil | #endif
|
462 | 7657f4bf | Stefan Weil | uint8_t tmp8; |
463 | 7657f4bf | Stefan Weil | uint16_t tmp16; |
464 | 7657f4bf | Stefan Weil | uint32_t tmp32; |
465 | 7657f4bf | Stefan Weil | uint64_t tmp64; |
466 | 7657f4bf | Stefan Weil | #if TCG_TARGET_REG_BITS == 32 |
467 | 7657f4bf | Stefan Weil | uint64_t v64; |
468 | 7657f4bf | Stefan Weil | #endif
|
469 | 7657f4bf | Stefan Weil | |
470 | dea8fde8 | Richard Henderson | #if defined(GETPC)
|
471 | dea8fde8 | Richard Henderson | tci_tb_ptr = (uintptr_t)tb_ptr; |
472 | dea8fde8 | Richard Henderson | #endif
|
473 | dea8fde8 | Richard Henderson | |
474 | 7657f4bf | Stefan Weil | /* Skip opcode and size entry. */
|
475 | 7657f4bf | Stefan Weil | tb_ptr += 2;
|
476 | 7657f4bf | Stefan Weil | |
477 | 7657f4bf | Stefan Weil | switch (opc) {
|
478 | 7657f4bf | Stefan Weil | case INDEX_op_end:
|
479 | 7657f4bf | Stefan Weil | case INDEX_op_nop:
|
480 | 7657f4bf | Stefan Weil | break;
|
481 | 7657f4bf | Stefan Weil | case INDEX_op_nop1:
|
482 | 7657f4bf | Stefan Weil | case INDEX_op_nop2:
|
483 | 7657f4bf | Stefan Weil | case INDEX_op_nop3:
|
484 | 7657f4bf | Stefan Weil | case INDEX_op_nopn:
|
485 | 7657f4bf | Stefan Weil | case INDEX_op_discard:
|
486 | 7657f4bf | Stefan Weil | TODO(); |
487 | 7657f4bf | Stefan Weil | break;
|
488 | 7657f4bf | Stefan Weil | case INDEX_op_set_label:
|
489 | 7657f4bf | Stefan Weil | TODO(); |
490 | 7657f4bf | Stefan Weil | break;
|
491 | 7657f4bf | Stefan Weil | case INDEX_op_call:
|
492 | 7657f4bf | Stefan Weil | t0 = tci_read_ri(&tb_ptr); |
493 | 7657f4bf | Stefan Weil | #if TCG_TARGET_REG_BITS == 32 |
494 | 7657f4bf | Stefan Weil | tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0), |
495 | 7657f4bf | Stefan Weil | tci_read_reg(TCG_REG_R1), |
496 | 7657f4bf | Stefan Weil | tci_read_reg(TCG_REG_R2), |
497 | 7657f4bf | Stefan Weil | tci_read_reg(TCG_REG_R3), |
498 | 7657f4bf | Stefan Weil | tci_read_reg(TCG_REG_R5), |
499 | 7657f4bf | Stefan Weil | tci_read_reg(TCG_REG_R6), |
500 | 7657f4bf | Stefan Weil | tci_read_reg(TCG_REG_R7), |
501 | 6673f47d | Stefan Weil | tci_read_reg(TCG_REG_R8), |
502 | 6673f47d | Stefan Weil | tci_read_reg(TCG_REG_R9), |
503 | 6673f47d | Stefan Weil | tci_read_reg(TCG_REG_R10)); |
504 | 7657f4bf | Stefan Weil | tci_write_reg(TCG_REG_R0, tmp64); |
505 | 7657f4bf | Stefan Weil | tci_write_reg(TCG_REG_R1, tmp64 >> 32);
|
506 | 7657f4bf | Stefan Weil | #else
|
507 | 7657f4bf | Stefan Weil | tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0), |
508 | 7657f4bf | Stefan Weil | tci_read_reg(TCG_REG_R1), |
509 | 7657f4bf | Stefan Weil | tci_read_reg(TCG_REG_R2), |
510 | 6673f47d | Stefan Weil | tci_read_reg(TCG_REG_R3), |
511 | 6673f47d | Stefan Weil | tci_read_reg(TCG_REG_R5)); |
512 | 7657f4bf | Stefan Weil | tci_write_reg(TCG_REG_R0, tmp64); |
513 | 7657f4bf | Stefan Weil | #endif
|
514 | 7657f4bf | Stefan Weil | break;
|
515 | 7657f4bf | Stefan Weil | case INDEX_op_br:
|
516 | 7657f4bf | Stefan Weil | label = tci_read_label(&tb_ptr); |
517 | 7657f4bf | Stefan Weil | assert(tb_ptr == old_code_ptr + op_size); |
518 | 7657f4bf | Stefan Weil | tb_ptr = (uint8_t *)label; |
519 | 7657f4bf | Stefan Weil | continue;
|
520 | 7657f4bf | Stefan Weil | case INDEX_op_setcond_i32:
|
521 | 7657f4bf | Stefan Weil | t0 = *tb_ptr++; |
522 | 7657f4bf | Stefan Weil | t1 = tci_read_r32(&tb_ptr); |
523 | 7657f4bf | Stefan Weil | t2 = tci_read_ri32(&tb_ptr); |
524 | 7657f4bf | Stefan Weil | condition = *tb_ptr++; |
525 | 7657f4bf | Stefan Weil | tci_write_reg32(t0, tci_compare32(t1, t2, condition)); |
526 | 7657f4bf | Stefan Weil | break;
|
527 | 7657f4bf | Stefan Weil | #if TCG_TARGET_REG_BITS == 32 |
528 | 7657f4bf | Stefan Weil | case INDEX_op_setcond2_i32:
|
529 | 7657f4bf | Stefan Weil | t0 = *tb_ptr++; |
530 | 7657f4bf | Stefan Weil | tmp64 = tci_read_r64(&tb_ptr); |
531 | 7657f4bf | Stefan Weil | v64 = tci_read_ri64(&tb_ptr); |
532 | 7657f4bf | Stefan Weil | condition = *tb_ptr++; |
533 | 7657f4bf | Stefan Weil | tci_write_reg32(t0, tci_compare64(tmp64, v64, condition)); |
534 | 7657f4bf | Stefan Weil | break;
|
535 | 7657f4bf | Stefan Weil | #elif TCG_TARGET_REG_BITS == 64 |
536 | 7657f4bf | Stefan Weil | case INDEX_op_setcond_i64:
|
537 | 7657f4bf | Stefan Weil | t0 = *tb_ptr++; |
538 | 7657f4bf | Stefan Weil | t1 = tci_read_r64(&tb_ptr); |
539 | 7657f4bf | Stefan Weil | t2 = tci_read_ri64(&tb_ptr); |
540 | 7657f4bf | Stefan Weil | condition = *tb_ptr++; |
541 | 7657f4bf | Stefan Weil | tci_write_reg64(t0, tci_compare64(t1, t2, condition)); |
542 | 7657f4bf | Stefan Weil | break;
|
543 | 7657f4bf | Stefan Weil | #endif
|
        /* Register moves and constant loads (32 bit).  Opcode operands
           follow the opcode byte in the bytecode stream; each tci_read_*
           helper decodes one operand and advances tb_ptr past it. */
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;                 /* destination register index */
            t1 = tci_read_r32(&tb_ptr);     /* source register value */
            tci_write_reg32(t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);     /* 32 bit immediate */
            tci_write_reg32(t0, t1);
            break;

            /* Load/store operations (32 bit). */

        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;                 /* destination register index */
            t1 = tci_read_r(&tb_ptr);       /* base address (host pointer) */
            t2 = tci_read_s32(&tb_ptr);     /* signed 32 bit displacement */
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
        case INDEX_op_ld16u_i32:
            TODO();                         /* unimplemented opcode */
            break;
        case INDEX_op_ld16s_i32:
            TODO();                         /* unimplemented opcode */
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(&tb_ptr);      /* value to store */
            t1 = tci_read_r(&tb_ptr);       /* base address */
            t2 = tci_read_s32(&tb_ptr);     /* displacement */
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            /* Stores relative to the stack pointer must use a negative
               displacement, i.e. stay within the current frame. */
            assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint32_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);    /* register-or-immediate operand */
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            /* Signed division; the casts select the signed operation. */
            tci_write_reg32(t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 / t2);   /* unsigned division */
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 % (int32_t)t2);  /* signed remainder */
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 % t2);   /* unsigned remainder */
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();                         /* unimplemented opcode */
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 ^ t2);
            break;
|
665 | 7657f4bf | Stefan Weil | |
666 | 7657f4bf | Stefan Weil | /* Shift/rotate operations (32 bit). */
|
667 | 7657f4bf | Stefan Weil | |
668 | 7657f4bf | Stefan Weil | case INDEX_op_shl_i32:
|
669 | 7657f4bf | Stefan Weil | t0 = *tb_ptr++; |
670 | 7657f4bf | Stefan Weil | t1 = tci_read_ri32(&tb_ptr); |
671 | 7657f4bf | Stefan Weil | t2 = tci_read_ri32(&tb_ptr); |
672 | 7657f4bf | Stefan Weil | tci_write_reg32(t0, t1 << t2); |
673 | 7657f4bf | Stefan Weil | break;
|
674 | 7657f4bf | Stefan Weil | case INDEX_op_shr_i32:
|
675 | 7657f4bf | Stefan Weil | t0 = *tb_ptr++; |
676 | 7657f4bf | Stefan Weil | t1 = tci_read_ri32(&tb_ptr); |
677 | 7657f4bf | Stefan Weil | t2 = tci_read_ri32(&tb_ptr); |
678 | 7657f4bf | Stefan Weil | tci_write_reg32(t0, t1 >> t2); |
679 | 7657f4bf | Stefan Weil | break;
|
680 | 7657f4bf | Stefan Weil | case INDEX_op_sar_i32:
|
681 | 7657f4bf | Stefan Weil | t0 = *tb_ptr++; |
682 | 7657f4bf | Stefan Weil | t1 = tci_read_ri32(&tb_ptr); |
683 | 7657f4bf | Stefan Weil | t2 = tci_read_ri32(&tb_ptr); |
684 | 7657f4bf | Stefan Weil | tci_write_reg32(t0, ((int32_t)t1 >> t2)); |
685 | 7657f4bf | Stefan Weil | break;
|
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;                 /* destination register index */
            t1 = tci_read_ri32(&tb_ptr);    /* value */
            t2 = tci_read_ri32(&tb_ptr);    /* rotate count */
            tci_write_reg32(t0, rol32(t1, t2));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ror32(t1, t2));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);     /* background value */
            t2 = tci_read_r32(&tb_ptr);     /* field to insert */
            tmp16 = *tb_ptr++;              /* bit position of the field */
            tmp8 = *tb_ptr++;               /* field length in bits */
            /* Build a mask of tmp8 bits at position tmp16, then merge. */
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg32(t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(&tb_ptr);     /* first comparison operand */
            t1 = tci_read_ri32(&tb_ptr);    /* second operand (reg or imm) */
            condition = *tb_ptr++;          /* TCG condition code */
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                /* All operands must have been consumed before branching. */
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;  /* branch to bytecode address */
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
            /* On 32 bit hosts, 64 bit values live in register pairs. */
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;                 /* low half destination */
            t1 = *tb_ptr++;                 /* high half destination */
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 += tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 -= tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(&tb_ptr);
            tmp64 = tci_read_r32(&tb_ptr);
            /* 32 x 32 -> 64 bit unsigned multiply. */
            tci_write_reg64(t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);     /* sign-extended 8 bit read */
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);    /* sign-extended 16 bit read */
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);      /* zero-extended 8 bit read */
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);     /* zero-extended 16 bit read */
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, -t1);
            break;
#endif
|
#if TCG_TARGET_REG_BITS == 64
        /* 64 bit opcodes; only available on 64 bit hosts. */
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;                 /* destination register index */
            t1 = tci_read_r64(&tb_ptr);     /* source register value */
            tci_write_reg64(t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);     /* 64 bit immediate */
            tci_write_reg64(t0, t1);
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;                 /* destination register index */
            t1 = tci_read_r(&tb_ptr);       /* base address (host pointer) */
            t2 = tci_read_s32(&tb_ptr);     /* signed 32 bit displacement */
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
        case INDEX_op_ld16u_i64:
        case INDEX_op_ld16s_i64:
            TODO();                         /* unimplemented opcode */
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));  /* zero-extends */
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32s(t0, *(int32_t *)(t1 + t2));  /* sign-extends */
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg64(t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(&tb_ptr);      /* value to store */
            t1 = tci_read_r(&tb_ptr);       /* base address */
            t2 = tci_read_s32(&tb_ptr);     /* displacement */
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            /* Stores relative to the stack pointer must use a negative
               displacement, i.e. stay within the current frame. */
            assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint64_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);    /* register-or-immediate operand */
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();                         /* unimplemented opcode */
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();                         /* unimplemented opcode */
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 ^ t2);
            break;
|
932 | 7657f4bf | Stefan Weil | |
933 | 7657f4bf | Stefan Weil | /* Shift/rotate operations (64 bit). */
|
934 | 7657f4bf | Stefan Weil | |
935 | 7657f4bf | Stefan Weil | case INDEX_op_shl_i64:
|
936 | 7657f4bf | Stefan Weil | t0 = *tb_ptr++; |
937 | 7657f4bf | Stefan Weil | t1 = tci_read_ri64(&tb_ptr); |
938 | 7657f4bf | Stefan Weil | t2 = tci_read_ri64(&tb_ptr); |
939 | 7657f4bf | Stefan Weil | tci_write_reg64(t0, t1 << t2); |
940 | 7657f4bf | Stefan Weil | break;
|
941 | 7657f4bf | Stefan Weil | case INDEX_op_shr_i64:
|
942 | 7657f4bf | Stefan Weil | t0 = *tb_ptr++; |
943 | 7657f4bf | Stefan Weil | t1 = tci_read_ri64(&tb_ptr); |
944 | 7657f4bf | Stefan Weil | t2 = tci_read_ri64(&tb_ptr); |
945 | 7657f4bf | Stefan Weil | tci_write_reg64(t0, t1 >> t2); |
946 | 7657f4bf | Stefan Weil | break;
|
947 | 7657f4bf | Stefan Weil | case INDEX_op_sar_i64:
|
948 | 7657f4bf | Stefan Weil | t0 = *tb_ptr++; |
949 | 7657f4bf | Stefan Weil | t1 = tci_read_ri64(&tb_ptr); |
950 | 7657f4bf | Stefan Weil | t2 = tci_read_ri64(&tb_ptr); |
951 | 7657f4bf | Stefan Weil | tci_write_reg64(t0, ((int64_t)t1 >> t2)); |
952 | 7657f4bf | Stefan Weil | break;
|
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            t0 = *tb_ptr++;                 /* destination register index */
            t1 = tci_read_ri64(&tb_ptr);    /* value */
            t2 = tci_read_ri64(&tb_ptr);    /* rotate count */
            tci_write_reg64(t0, rol64(t1, t2));
            break;
        case INDEX_op_rotr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ror64(t1, t2));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);     /* background value */
            t2 = tci_read_r64(&tb_ptr);     /* field to insert */
            tmp16 = *tb_ptr++;              /* bit position of the field */
            tmp8 = *tb_ptr++;               /* field length in bits */
            /* Build a mask of tmp8 bits at position tmp16, then merge. */
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg64(t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(&tb_ptr);     /* first comparison operand */
            t1 = tci_read_ri64(&tb_ptr);    /* second operand (reg or imm) */
            condition = *tb_ptr++;          /* TCG condition code */
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                /* All operands must have been consumed before branching. */
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;  /* branch to bytecode address */
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);      /* zero-extended 8 bit read */
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);     /* sign-extended 8 bit read */
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);    /* sign-extended 16 bit read */
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);     /* zero-extended 16 bit read */
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(&tb_ptr);    /* sign-extended 32 bit read */
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);     /* zero-extended 32 bit read */
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            /* NOTE(review): TODO() fires before the code below, so this
               path appears to be flagged as untested/unfinished. */
            TODO();
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */
1068 | 7657f4bf | Stefan Weil | |
1069 | 7657f4bf | Stefan Weil | /* QEMU specific operations. */
|
1070 | 7657f4bf | Stefan Weil | |
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        case INDEX_op_debug_insn_start:
            TODO();                         /* unimplemented opcode */
            break;
#else
        case INDEX_op_debug_insn_start:
            TODO();                         /* unimplemented opcode */
            break;
#endif
        case INDEX_op_exit_tb:
            /* Leave the interpreter loop, returning the 64 bit value
               that follows the opcode. */
            next_tb = *(uint64_t *)tb_ptr;
            goto exit;
            break;
        case INDEX_op_goto_tb:
            t0 = tci_read_i32(&tb_ptr);     /* relative displacement */
            /* All operands must have been consumed before branching. */
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;          /* jump within the bytecode */
            continue;
        /* Guest memory loads.  taddr is a guest virtual address; with
           CONFIG_SOFTMMU the access goes through the MMU helpers, which
           also consume an extra immediate operand, otherwise the guest
           address is used directly (offset by GUEST_BASE). */
        case INDEX_op_qemu_ld8u:
            t0 = *tb_ptr++;                 /* destination register index */
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp8 = helper_ldb_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
#endif
            tci_write_reg8(t0, tmp8);
            break;
        case INDEX_op_qemu_ld8s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp8 = helper_ldb_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
#endif
            tci_write_reg8s(t0, tmp8);      /* sign-extend into register */
            break;
        case INDEX_op_qemu_ld16u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp16 = helper_ldw_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            /* tswap16 byte-swaps if host and target endianness differ. */
            tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg16(t0, tmp16);
            break;
        case INDEX_op_qemu_ld16s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp16 = helper_ldw_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg16s(t0, tmp16);    /* sign-extend into register */
            break;
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_qemu_ld32u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32(t0, tmp32);     /* zero-extend into register */
            break;
        case INDEX_op_qemu_ld32s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32s(t0, tmp32);    /* sign-extend into register */
            break;
#endif /* TCG_TARGET_REG_BITS == 64 */
        case INDEX_op_qemu_ld32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32(t0, tmp32);
            break;
        case INDEX_op_qemu_ld64:
            t0 = *tb_ptr++;
#if TCG_TARGET_REG_BITS == 32
            t1 = *tb_ptr++;                 /* high half destination */
#endif
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp64 = helper_ldq_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            tmp64 = tswap64(*(uint64_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg(t0, tmp64);
#if TCG_TARGET_REG_BITS == 32
            tci_write_reg(t1, tmp64 >> 32); /* high half on 32 bit hosts */
#endif
            break;
|
1185 | 7657f4bf | Stefan Weil | case INDEX_op_qemu_st8:
|
1186 | 7657f4bf | Stefan Weil | t0 = tci_read_r8(&tb_ptr); |
1187 | 7657f4bf | Stefan Weil | taddr = tci_read_ulong(&tb_ptr); |
1188 | 7657f4bf | Stefan Weil | #ifdef CONFIG_SOFTMMU
|
1189 | 7657f4bf | Stefan Weil | t2 = tci_read_i(&tb_ptr); |
1190 | 3b2aba2f | Stefan Weil | helper_stb_mmu(env, taddr, t0, t2); |
1191 | 7657f4bf | Stefan Weil | #else
|
1192 | 7657f4bf | Stefan Weil | host_addr = (tcg_target_ulong)taddr; |
1193 | 7657f4bf | Stefan Weil | *(uint8_t *)(host_addr + GUEST_BASE) = t0; |
1194 | 7657f4bf | Stefan Weil | #endif
|
1195 | 7657f4bf | Stefan Weil | break;
|
1196 | 7657f4bf | Stefan Weil | case INDEX_op_qemu_st16:
|
1197 | 7657f4bf | Stefan Weil | t0 = tci_read_r16(&tb_ptr); |
1198 | 7657f4bf | Stefan Weil | taddr = tci_read_ulong(&tb_ptr); |
1199 | 7657f4bf | Stefan Weil | #ifdef CONFIG_SOFTMMU
|
1200 | 7657f4bf | Stefan Weil | t2 = tci_read_i(&tb_ptr); |
1201 | 3b2aba2f | Stefan Weil | helper_stw_mmu(env, taddr, t0, t2); |
1202 | 7657f4bf | Stefan Weil | #else
|
1203 | 7657f4bf | Stefan Weil | host_addr = (tcg_target_ulong)taddr; |
1204 | 7657f4bf | Stefan Weil | *(uint16_t *)(host_addr + GUEST_BASE) = tswap16(t0); |
1205 | 7657f4bf | Stefan Weil | #endif
|
1206 | 7657f4bf | Stefan Weil | break;
|
1207 | 7657f4bf | Stefan Weil | case INDEX_op_qemu_st32:
|
1208 | 7657f4bf | Stefan Weil | t0 = tci_read_r32(&tb_ptr); |
1209 | 7657f4bf | Stefan Weil | taddr = tci_read_ulong(&tb_ptr); |
1210 | 7657f4bf | Stefan Weil | #ifdef CONFIG_SOFTMMU
|
1211 | 7657f4bf | Stefan Weil | t2 = tci_read_i(&tb_ptr); |
1212 | 3b2aba2f | Stefan Weil | helper_stl_mmu(env, taddr, t0, t2); |
1213 | 7657f4bf | Stefan Weil | #else
|
1214 | 7657f4bf | Stefan Weil | host_addr = (tcg_target_ulong)taddr; |
1215 | 7657f4bf | Stefan Weil | *(uint32_t *)(host_addr + GUEST_BASE) = tswap32(t0); |
1216 | 7657f4bf | Stefan Weil | #endif
|
1217 | 7657f4bf | Stefan Weil | break;
|
1218 | 7657f4bf | Stefan Weil | case INDEX_op_qemu_st64:
|
1219 | 7657f4bf | Stefan Weil | tmp64 = tci_read_r64(&tb_ptr); |
1220 | 7657f4bf | Stefan Weil | taddr = tci_read_ulong(&tb_ptr); |
1221 | 7657f4bf | Stefan Weil | #ifdef CONFIG_SOFTMMU
|
1222 | 7657f4bf | Stefan Weil | t2 = tci_read_i(&tb_ptr); |
1223 | 3b2aba2f | Stefan Weil | helper_stq_mmu(env, taddr, tmp64, t2); |
1224 | 7657f4bf | Stefan Weil | #else
|
1225 | 7657f4bf | Stefan Weil | host_addr = (tcg_target_ulong)taddr; |
1226 | 7657f4bf | Stefan Weil | *(uint64_t *)(host_addr + GUEST_BASE) = tswap64(tmp64); |
1227 | 7657f4bf | Stefan Weil | #endif
|
1228 | 7657f4bf | Stefan Weil | break;
|
1229 | 7657f4bf | Stefan Weil | default:
|
1230 | 7657f4bf | Stefan Weil | TODO(); |
1231 | 7657f4bf | Stefan Weil | break;
|
1232 | 7657f4bf | Stefan Weil | } |
1233 | 7657f4bf | Stefan Weil | assert(tb_ptr == old_code_ptr + op_size); |
1234 | 7657f4bf | Stefan Weil | } |
1235 | 7657f4bf | Stefan Weil | exit:
|
1236 | 7657f4bf | Stefan Weil | return next_tb;
|
1237 | 7657f4bf | Stefan Weil | } |