/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "config.h"

/* Defining NDEBUG disables assertions (which makes the code faster). */
#if !defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
# define NDEBUG
#endif

#include "qemu-common.h"
#include "exec/exec-all.h"           /* MAX_OPC_PARAM_IARGS */
#include "tcg-op.h"

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

#if MAX_OPC_PARAM_IARGS != 5
# error Fix needed, number of supported input arguments changed!
#endif
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong);
#endif
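
/*
 * Note: MAX_OPC_PARAM_IARGS is 5, so a helper can take at most five input
 * arguments.  On a 32 bit host each argument may be a 64 bit value split
 * across two 32 bit registers, which is why the 32 bit variant of
 * helper_function declares 2 * 5 = 10 parameters (see INDEX_op_call below).
 */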

/* Targets which don't use GETPC also don't need tci_tb_ptr
   which makes them a little faster. */
#if defined(GETPC)
uintptr_t tci_tb_ptr;
#endif

static tcg_target_ulong tci_reg[TCG_TARGET_NB_REGS];
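
/*
 * tci_reg models the register file of the TCI virtual machine.  Two entries
 * are special: TCG_AREG0 holds the CPUArchState pointer and
 * TCG_REG_CALL_STACK holds the interpreter's stack pointer; both are
 * initialized in tcg_qemu_tb_exec() and must never be overwritten by
 * generated bytecode (tci_write_reg asserts this).
 */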

static tcg_target_ulong tci_read_reg(TCGReg index)
{
    assert(index < ARRAY_SIZE(tci_reg));
    return tci_reg[index];
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
static int8_t tci_read_reg8s(TCGReg index)
{
    return (int8_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
static int16_t tci_read_reg16s(TCGReg index)
{
    return (int16_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(TCGReg index)
{
    return (int32_t)tci_read_reg(index);
}
#endif

static uint8_t tci_read_reg8(TCGReg index)
{
    return (uint8_t)tci_read_reg(index);
}

static uint16_t tci_read_reg16(TCGReg index)
{
    return (uint16_t)tci_read_reg(index);
}

static uint32_t tci_read_reg32(TCGReg index)
{
    return (uint32_t)tci_read_reg(index);
}

#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(TCGReg index)
{
    return tci_read_reg(index);
}
#endif
|
111 |
static void tci_write_reg(TCGReg index, tcg_target_ulong value) |
112 |
{ |
113 |
assert(index < ARRAY_SIZE(tci_reg)); |
114 |
assert(index != TCG_AREG0); |
115 |
assert(index != TCG_REG_CALL_STACK); |
116 |
tci_reg[index] = value; |
117 |
} |
118 |
|
119 |
static void tci_write_reg8s(TCGReg index, int8_t value) |
120 |
{ |
121 |
tci_write_reg(index, value); |
122 |
} |
123 |
|
124 |
static void tci_write_reg16s(TCGReg index, int16_t value) |
125 |
{ |
126 |
tci_write_reg(index, value); |
127 |
} |
128 |
|
129 |
#if TCG_TARGET_REG_BITS == 64 |
130 |
static void tci_write_reg32s(TCGReg index, int32_t value) |
131 |
{ |
132 |
tci_write_reg(index, value); |
133 |
} |
134 |
#endif
|
135 |
|
136 |
static void tci_write_reg8(TCGReg index, uint8_t value) |
137 |
{ |
138 |
tci_write_reg(index, value); |
139 |
} |
140 |
|
141 |
static void tci_write_reg16(TCGReg index, uint16_t value) |
142 |
{ |
143 |
tci_write_reg(index, value); |
144 |
} |
145 |
|
146 |
static void tci_write_reg32(TCGReg index, uint32_t value) |
147 |
{ |
148 |
tci_write_reg(index, value); |
149 |
} |
150 |
|
151 |
#if TCG_TARGET_REG_BITS == 32 |
152 |
static void tci_write_reg64(uint32_t high_index, uint32_t low_index, |
153 |
uint64_t value) |
154 |
{ |
155 |
tci_write_reg(low_index, value); |
156 |
tci_write_reg(high_index, value >> 32);
|
157 |
} |
158 |
#elif TCG_TARGET_REG_BITS == 64 |
159 |
static void tci_write_reg64(TCGReg index, uint64_t value) |
160 |
{ |
161 |
tci_write_reg(index, value); |
162 |
} |
163 |
#endif
|
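
/*
 * Note: on 32 bit hosts tci_write_reg64 spreads a 64 bit value over a
 * register pair; the low half goes to low_index, the high half to
 * high_index.  This mirrors how tci_uint64() and tci_read_r64() below
 * reassemble such pairs.
 */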

#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif
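
/*
 * Bytecode layout: every TCI instruction starts with a one byte opcode
 * followed by a one byte total instruction size; the remaining bytes are
 * operands.  Register operands are single index bytes, while constants
 * are stored full width in host byte order (see the readers below and
 * the dispatch loop in tcg_qemu_tb_exec).
 */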

/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read unsigned constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value = *(uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read signed constant (32 bit) from bytecode. */
static int32_t tci_read_s32(uint8_t **tb_ptr)
{
    int32_t value = *(int32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value = *(uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif
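
/*
 * Note: the constant readers above dereference tb_ptr at its current
 * position without any alignment handling, so they assume either a host
 * that tolerates unaligned loads or a code generator that keeps
 * constants suitably placed.
 */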

/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong tci_read_r(uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(tb_ptr);
    return tci_uint64(tci_read_r32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register(s) with target address from bytecode. */
static target_ulong tci_read_ulong(uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(tb_ptr) << 32;
#endif
    return taddr;
}

/* Read indexed register or constant (native size) from bytecode. */
static tcg_target_ulong tci_read_ri(uint8_t **tb_ptr)
{
    tcg_target_ulong value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i(tb_ptr);
    } else {
        value = tci_read_reg(r);
    }
    return value;
}

/* Read indexed register or constant (32 bit) from bytecode. */
static uint32_t tci_read_ri32(uint8_t **tb_ptr)
{
    uint32_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i32(tb_ptr);
    } else {
        value = tci_read_reg32(r);
    }
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(tb_ptr);
    return tci_uint64(tci_read_ri32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint64_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i64(tb_ptr);
    } else {
        value = tci_read_reg64(r);
    }
    return value;
}
#endif
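
/*
 * Note: an "ri" operand is encoded as a single register index byte, with
 * the reserved index TCG_CONST signalling that an immediate of the
 * appropriate width follows in the instruction stream instead.
 */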

static tcg_target_ulong tci_read_label(uint8_t **tb_ptr)
{
    tcg_target_ulong label = tci_read_i(tb_ptr);
    assert(label != 0);
    return label;
}

static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

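/*
 * Note: the signed conditions (LT/GE/LE/GT) compare the operands
 * reinterpreted as int32_t/int64_t, while the unsigned ones
 * (LTU/GEU/LEU/GTU) compare them as given.  For example, 0xffffffff LT 0
 * is true (the left operand is -1 as a signed value), but
 * 0xffffffff LTU 0 is false.
 */
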
/* Interpret pseudo code in tb. */
uintptr_t tcg_qemu_tb_exec(CPUArchState *env, uint8_t *tb_ptr)
{
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
    uintptr_t next_tb = 0;

    tci_reg[TCG_AREG0] = (tcg_target_ulong)env;
    tci_reg[TCG_REG_CALL_STACK] = sp_value;
    assert(tb_ptr);

    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
#ifndef CONFIG_SOFTMMU
        tcg_target_ulong host_addr;
#endif
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif

#if defined(GETPC)
        tci_tb_ptr = (uintptr_t)tb_ptr;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;

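        /*
         * Note: each case below must consume exactly op_size bytes of
         * bytecode; this is checked by the assert at the bottom of the
         * loop, and again before every branch that replaces tb_ptr.
         */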
        switch (opc) {
        case INDEX_op_end:
        case INDEX_op_nop:
            break;
        case INDEX_op_nop1:
        case INDEX_op_nop2:
        case INDEX_op_nop3:
        case INDEX_op_nopn:
        case INDEX_op_discard:
            TODO();
            break;
        case INDEX_op_set_label:
            TODO();
            break;
        case INDEX_op_call:
            t0 = tci_read_ri(&tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5),
                                          tci_read_reg(TCG_REG_R6),
                                          tci_read_reg(TCG_REG_R7),
                                          tci_read_reg(TCG_REG_R8),
                                          tci_read_reg(TCG_REG_R9),
                                          tci_read_reg(TCG_REG_R10));
            tci_write_reg(TCG_REG_R0, tmp64);
            tci_write_reg(TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5));
            tci_write_reg(TCG_REG_R0, tmp64);
#endif
            break;
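        /*
         * Note: for INDEX_op_call the helper address comes from an "ri"
         * operand; the arguments are taken from fixed registers (R0-R3
         * and R5-R10 on 32 bit hosts, R0-R3 and R5 otherwise; R4 is
         * skipped), and the 64 bit result is returned in R0, with R1
         * holding the high half on 32 bit hosts.
         */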
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;

            /* Load/store operations (32 bit). */

        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint32_t *)(t1 + t2) = t0;
            break;

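        /*
         * Note: these ld/st ops address host memory directly (a register
         * base plus a signed 32 bit offset).  The assert above checks
         * that stores relative to the stack pointer use negative offsets
         * only, since sp_value points to the end of the tcg_temps buffer.
         */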
            /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 << t2);
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 >> t2);
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ((int32_t)t1 >> t2));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, rol32(t1, t2));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ror32(t1, t2));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_r32(&tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg32(t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
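        /*
         * Note: deposit reads a position byte (tmp16) and a length byte
         * (tmp8), builds the field mask ((1 << len) - 1) << pos, and
         * merges t2 into that field of t1.  E.g. pos = 8, len = 8 gives
         * mask 0x0000ff00, so byte 1 of t1 is replaced by the low byte
         * of t2.
         */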
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 += tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 -= tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(&tb_ptr);
            tmp64 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
        case INDEX_op_ld16u_i64:
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32s(t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg64(t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint64_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 << t2);
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 >> t2);
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ((int64_t)t1 >> t2));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, rol64(t1, t2));
            break;
        case INDEX_op_rotr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ror64(t1, t2));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_r64(&tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg64(t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            TODO();
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        case INDEX_op_debug_insn_start:
            TODO();
            break;
#else
        case INDEX_op_debug_insn_start:
            TODO();
            break;
#endif
        case INDEX_op_exit_tb:
            next_tb = *(uint64_t *)tb_ptr;
            goto exit;
            break;
        case INDEX_op_goto_tb:
            t0 = tci_read_i32(&tb_ptr);
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
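        /*
         * Guest memory accesses: with CONFIG_SOFTMMU the qemu_ld/st ops
         * call helper_ldb/ldw/ldl/ldq_mmu and helper_stb/stw/stl/stq_mmu
         * with a memory index read from the bytecode; in user mode the
         * guest address (plus GUEST_BASE) is dereferenced directly, with
         * tswap16/32/64 applied where guest and host endianness differ.
         */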
        case INDEX_op_qemu_ld8u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp8 = helper_ldb_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
#endif
            tci_write_reg8(t0, tmp8);
            break;
        case INDEX_op_qemu_ld8s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp8 = helper_ldb_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
#endif
            tci_write_reg8s(t0, tmp8);
            break;
        case INDEX_op_qemu_ld16u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp16 = helper_ldw_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg16(t0, tmp16);
            break;
        case INDEX_op_qemu_ld16s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp16 = helper_ldw_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg16s(t0, tmp16);
            break;
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_qemu_ld32u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32(t0, tmp32);
            break;
        case INDEX_op_qemu_ld32s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32s(t0, tmp32);
            break;
#endif /* TCG_TARGET_REG_BITS == 64 */
        case INDEX_op_qemu_ld32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32(t0, tmp32);
            break;
        case INDEX_op_qemu_ld64:
            t0 = *tb_ptr++;
#if TCG_TARGET_REG_BITS == 32
            t1 = *tb_ptr++;
#endif
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp64 = helper_ldq_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            tmp64 = tswap64(*(uint64_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg(t0, tmp64);
#if TCG_TARGET_REG_BITS == 32
            tci_write_reg(t1, tmp64 >> 32);
#endif
            break;
        case INDEX_op_qemu_st8:
            t0 = tci_read_r8(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stb_mmu(env, taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            *(uint8_t *)(host_addr + GUEST_BASE) = t0;
#endif
            break;
        case INDEX_op_qemu_st16:
            t0 = tci_read_r16(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stw_mmu(env, taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            *(uint16_t *)(host_addr + GUEST_BASE) = tswap16(t0);
#endif
            break;
        case INDEX_op_qemu_st32:
            t0 = tci_read_r32(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stl_mmu(env, taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            *(uint32_t *)(host_addr + GUEST_BASE) = tswap32(t0);
#endif
            break;
        case INDEX_op_qemu_st64:
            tmp64 = tci_read_r64(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stq_mmu(env, taddr, tmp64, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            *(uint64_t *)(host_addr + GUEST_BASE) = tswap64(tmp64);
#endif
            break;
        default:
            TODO();
            break;
        }
        assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return next_tb;
}