root / tci.c @ e3e87df4
/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "config.h"

/* Defining NDEBUG disables assertions (which makes the code faster). */
#if !defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
# define NDEBUG
#endif

#include "qemu-common.h"
#include "dyngen-exec.h"        /* env */
#include "exec-all.h"           /* MAX_OPC_PARAM_IARGS */
#include "tcg-op.h"

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

#if MAX_OPC_PARAM_IARGS != 4
# error Fix needed, number of supported input arguments changed!
#endif
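/*
 * Generic signature used to invoke TCG helper functions from the
 * interpreter.  Helpers receive MAX_OPC_PARAM_IARGS (four) input
 * arguments; on a 32 bit host each argument is passed as two
 * tcg_target_ulong words, which is why that variant takes eight
 * parameters.
 */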
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#endif

/* TCI can optionally use a global register variable for env. */
#if !defined(AREG0)
CPUArchState *env;
#endif

/* Targets which don't use GETPC also don't need tci_tb_ptr
   which makes them a little faster. */
#if defined(GETPC)
uintptr_t tci_tb_ptr;
#endif

static tcg_target_ulong tci_reg[TCG_TARGET_NB_REGS];

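/*
 * Builds without CONFIG_TCG_PASS_AREG0 use softmmu accessors that do not
 * take an env argument; the macros below let the interpreter call the
 * helper_*_mmu(env, ...) spelling unconditionally by dropping env and
 * forwarding to the legacy __ldX_mmu()/__stX_mmu() functions.
 */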
#if !defined(CONFIG_TCG_PASS_AREG0)
# define helper_ldb_mmu(env, addr, mmu_idx) __ldb_mmu(addr, mmu_idx)
# define helper_ldw_mmu(env, addr, mmu_idx) __ldw_mmu(addr, mmu_idx)
# define helper_ldl_mmu(env, addr, mmu_idx) __ldl_mmu(addr, mmu_idx)
# define helper_ldq_mmu(env, addr, mmu_idx) __ldq_mmu(addr, mmu_idx)
# define helper_stb_mmu(env, addr, val, mmu_idx) __stb_mmu(addr, val, mmu_idx)
# define helper_stw_mmu(env, addr, val, mmu_idx) __stw_mmu(addr, val, mmu_idx)
# define helper_stl_mmu(env, addr, val, mmu_idx) __stl_mmu(addr, val, mmu_idx)
# define helper_stq_mmu(env, addr, val, mmu_idx) __stq_mmu(addr, val, mmu_idx)
#endif /* !CONFIG_TCG_PASS_AREG0 */

static tcg_target_ulong tci_read_reg(TCGReg index)
{
    assert(index < ARRAY_SIZE(tci_reg));
    return tci_reg[index];
}

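/*
 * The accessors below provide width- and sign-specific views of tci_reg;
 * they back the typed operand readers (tci_read_r8, tci_read_r32s, ...)
 * so that each opcode handler sees its operands with the expected type.
 */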
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
static int8_t tci_read_reg8s(TCGReg index)
{
    return (int8_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
static int16_t tci_read_reg16s(TCGReg index)
{
    return (int16_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(TCGReg index)
{
    return (int32_t)tci_read_reg(index);
}
#endif

static uint8_t tci_read_reg8(TCGReg index)
{
    return (uint8_t)tci_read_reg(index);
}

static uint16_t tci_read_reg16(TCGReg index)
{
    return (uint16_t)tci_read_reg(index);
}

static uint32_t tci_read_reg32(TCGReg index)
{
    return (uint32_t)tci_read_reg(index);
}

#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(TCGReg index)
{
    return tci_read_reg(index);
}
#endif

static void tci_write_reg(TCGReg index, tcg_target_ulong value)
{
    assert(index < ARRAY_SIZE(tci_reg));
    assert(index != TCG_AREG0);
    tci_reg[index] = value;
}

static void tci_write_reg8s(TCGReg index, int8_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg16s(TCGReg index, int16_t value)
{
    tci_write_reg(index, value);
}

#if TCG_TARGET_REG_BITS == 64
static void tci_write_reg32s(TCGReg index, int32_t value)
{
    tci_write_reg(index, value);
}
#endif

static void tci_write_reg8(TCGReg index, uint8_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg16(TCGReg index, uint16_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg32(TCGReg index, uint32_t value)
{
    tci_write_reg(index, value);
}

#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(uint32_t high_index, uint32_t low_index,
                            uint64_t value)
{
    tci_write_reg(low_index, value);
    tci_write_reg(high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
static void tci_write_reg64(TCGReg index, uint64_t value)
{
    tci_write_reg(index, value);
}
#endif

#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif

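/*
 * Bytecode layout, as consumed by the readers below and by the dispatch
 * loop in tcg_qemu_tb_exec(): each operation starts with a one byte
 * TCGOpcode followed by a one byte total length, then its operands.
 * Register operands are single byte register indices; constants are
 * stored inline in 32 bit, 64 bit or native width.  For example, a
 * register-register INDEX_op_add_i32 occupies five bytes (opcode,
 * length, destination and two source register indices); an operand
 * encoded as a constant enlarges the op by the constant's width.
 */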
/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value = *(uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value = *(uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif

/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong tci_read_r(uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(tb_ptr);
    return tci_uint64(tci_read_r32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register(s) with target address from bytecode. */
static target_ulong tci_read_ulong(uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(tb_ptr) << 32;
#endif
    return taddr;
}

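/*
 * "ri" operands encode either a register or an inline constant: the first
 * byte is a register index, and the special index TCG_CONST means that a
 * constant of the requested width follows in the bytecode stream instead.
 */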
/* Read indexed register or constant (native size) from bytecode. */
static tcg_target_ulong tci_read_ri(uint8_t **tb_ptr)
{
    tcg_target_ulong value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i(tb_ptr);
    } else {
        value = tci_read_reg(r);
    }
    return value;
}

/* Read indexed register or constant (32 bit) from bytecode. */
static uint32_t tci_read_ri32(uint8_t **tb_ptr)
{
    uint32_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i32(tb_ptr);
    } else {
        value = tci_read_reg32(r);
    }
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(tb_ptr);
    return tci_uint64(tci_read_ri32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint64_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i64(tb_ptr);
    } else {
        value = tci_read_reg64(r);
    }
    return value;
}
#endif

static target_ulong tci_read_label(uint8_t **tb_ptr)
{
    target_ulong label = tci_read_i(tb_ptr);
    assert(label != 0);
    return label;
}

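/*
 * tci_compare32()/tci_compare64() evaluate a TCGCond on an operand pair,
 * using the signed view (i0, i1) for LT/GE/LE/GT and the unsigned view
 * (u0, u1) for EQ/NE and the *U conditions.
 */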
static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

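/*
 * Main interpreter loop: each iteration fetches one operation (byte 0 is
 * the TCGOpcode, byte 1 the total length, operands follow), executes it,
 * and, when assertions are enabled, checks that exactly op_size bytes
 * were consumed.  Branches re-enter the loop via "continue" with tb_ptr
 * set to the branch target; INDEX_op_exit_tb leaves the loop and returns
 * next_tb to the caller.
 */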
/* Interpret pseudo code in tb. */
tcg_target_ulong tcg_qemu_tb_exec(CPUArchState *cpustate, uint8_t *tb_ptr)
{
    tcg_target_ulong next_tb = 0;

    env = cpustate;
    tci_reg[TCG_AREG0] = (tcg_target_ulong)env;
    assert(tb_ptr);

    for (;;) {
#if defined(GETPC)
        tci_tb_ptr = (uintptr_t)tb_ptr;
#endif
        TCGOpcode opc = tb_ptr[0];
#if !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
#ifndef CONFIG_SOFTMMU
        tcg_target_ulong host_addr;
#endif
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
        case INDEX_op_end:
        case INDEX_op_nop:
            break;
        case INDEX_op_nop1:
        case INDEX_op_nop2:
        case INDEX_op_nop3:
        case INDEX_op_nopn:
        case INDEX_op_discard:
            TODO();
            break;
        case INDEX_op_set_label:
            TODO();
            break;
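        /*
         * Helper calls: the call target comes from the bytecode; the
         * argument words are taken from fixed registers (R0..R3 on 64 bit
         * hosts, R0..R3 plus R5..R8 on 32 bit hosts) and the 64 bit result
         * is written back to R0 (and R1 on 32 bit hosts).
         */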
        case INDEX_op_call:
            t0 = tci_read_ri(&tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5),
                                          tci_read_reg(TCG_REG_R6),
                                          tci_read_reg(TCG_REG_R7),
                                          tci_read_reg(TCG_REG_R8));
            tci_write_reg(TCG_REG_R0, tmp64);
            tci_write_reg(TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3));
            tci_write_reg(TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_jmp:
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
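        /*
         * setcond: read the destination register index, two operands and a
         * condition code, then store the 0/1 result of tci_compare32() or
         * tci_compare64() in the destination.
         */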
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;

            /* Load/store operations (32 bit). */

        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 << t2);
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 >> t2);
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ((int32_t)t1 >> t2));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (t1 << t2) | (t1 >> (32 - t2)));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (t1 >> t2) | (t1 << (32 - t2)));
            break;
#endif
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
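        /*
         * On 32 bit hosts, 64 bit values are handled as register pairs:
         * tci_read_r64() combines two 32 bit registers into a uint64_t and
         * tci_write_reg64() splits a result back into a low/high pair.
         */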
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 += tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 -= tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(&tb_ptr);
            tmp64 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
        case INDEX_op_ld16u_i64:
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg32s(t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg64(t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint64_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 << t2);
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 >> t2);
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ((int64_t)t1 >> t2));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
        case INDEX_op_rotr_i64:
            TODO();
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            TODO();
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            TODO();
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        case INDEX_op_debug_insn_start:
            TODO();
            break;
#else
        case INDEX_op_debug_insn_start:
            TODO();
            break;
#endif
        case INDEX_op_exit_tb:
            next_tb = *(uint64_t *)tb_ptr;
            goto exit;
            break;
        case INDEX_op_goto_tb:
            t0 = tci_read_i32(&tb_ptr);
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
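        /*
         * Guest memory accesses: with CONFIG_SOFTMMU the access goes
         * through the helper_ldX_mmu()/helper_stX_mmu() functions using
         * the mmu_idx read from the bytecode; in user mode the guest
         * address is used directly (offset by GUEST_BASE) and tswap*()
         * applies guest byte order where needed.
         */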
        case INDEX_op_qemu_ld8u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp8 = helper_ldb_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
#endif
            tci_write_reg8(t0, tmp8);
            break;
        case INDEX_op_qemu_ld8s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp8 = helper_ldb_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
#endif
            tci_write_reg8s(t0, tmp8);
            break;
        case INDEX_op_qemu_ld16u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp16 = helper_ldw_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg16(t0, tmp16);
            break;
        case INDEX_op_qemu_ld16s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp16 = helper_ldw_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg16s(t0, tmp16);
            break;
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_qemu_ld32u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32(t0, tmp32);
            break;
        case INDEX_op_qemu_ld32s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32s(t0, tmp32);
            break;
#endif /* TCG_TARGET_REG_BITS == 64 */
        case INDEX_op_qemu_ld32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32(t0, tmp32);
            break;
        case INDEX_op_qemu_ld64:
            t0 = *tb_ptr++;
#if TCG_TARGET_REG_BITS == 32
            t1 = *tb_ptr++;
#endif
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp64 = helper_ldq_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp64 = tswap64(*(uint64_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg(t0, tmp64);
#if TCG_TARGET_REG_BITS == 32
            tci_write_reg(t1, tmp64 >> 32);
#endif
            break;
        case INDEX_op_qemu_st8:
            t0 = tci_read_r8(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stb_mmu(env, taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint8_t *)(host_addr + GUEST_BASE) = t0;
#endif
            break;
        case INDEX_op_qemu_st16:
            t0 = tci_read_r16(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stw_mmu(env, taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint16_t *)(host_addr + GUEST_BASE) = tswap16(t0);
#endif
            break;
        case INDEX_op_qemu_st32:
            t0 = tci_read_r32(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stl_mmu(env, taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint32_t *)(host_addr + GUEST_BASE) = tswap32(t0);
#endif
            break;
        case INDEX_op_qemu_st64:
            tmp64 = tci_read_r64(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stq_mmu(env, taddr, tmp64, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint64_t *)(host_addr + GUEST_BASE) = tswap64(tmp64);
#endif
            break;
        default:
            TODO();
            break;
        }
        assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return next_tb;
}