Statistics
| Branch: | Revision:

root / target-i386 / translate.c @ 4242b1bd

History | View | Annotate | Download (240.9 kB)

1
/*
2
 *  i386 translation
3
 *
4
 *  Copyright (c) 2003 Fabrice Bellard
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, write to the Free Software
18
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
 */
20
#include <stdarg.h>
21
#include <stdlib.h>
22
#include <stdio.h>
23
#include <string.h>
24
#include <inttypes.h>
25
#include <signal.h>
26
#include <assert.h>
27

    
28
#include "cpu.h"
29
#include "exec-all.h"
30
#include "disas.h"
31
#include "helper.h"
32
#include "tcg-op.h"
33

    
34
#define PREFIX_REPZ   0x01
35
#define PREFIX_REPNZ  0x02
36
#define PREFIX_LOCK   0x04
37
#define PREFIX_DATA   0x08
38
#define PREFIX_ADR    0x10
39

    
40
#ifdef TARGET_X86_64
41
#define X86_64_ONLY(x) x
42
#define X86_64_DEF(x...) x
43
#define CODE64(s) ((s)->code64)
44
#define REX_X(s) ((s)->rex_x)
45
#define REX_B(s) ((s)->rex_b)
46
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
47
#if 1
48
#define BUGGY_64(x) NULL
49
#endif
50
#else
51
#define X86_64_ONLY(x) NULL
52
#define X86_64_DEF(x...)
53
#define CODE64(s) 0
54
#define REX_X(s) 0
55
#define REX_B(s) 0
56
#endif
57

    
58
//#define MACRO_TEST   1
59

    
60
/* global register indexes */
61
static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
62
/* local temps */
63
static TCGv cpu_T[2], cpu_T3;
64
/* local register indexes (only used inside old micro ops) */
65
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
66
static TCGv cpu_tmp5, cpu_tmp6;
67

    
68
#include "gen-icount.h"
69

    
70
#ifdef TARGET_X86_64
71
static int x86_64_hregs;
72
#endif
73

    
74
typedef struct DisasContext {
75
    /* current insn context */
76
    int override; /* -1 if no override */
77
    int prefix;
78
    int aflag, dflag;
79
    target_ulong pc; /* pc = eip + cs_base */
80
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
81
                   static state change (stop translation) */
82
    /* current block context */
83
    target_ulong cs_base; /* base of CS segment */
84
    int pe;     /* protected mode */
85
    int code32; /* 32 bit code segment */
86
#ifdef TARGET_X86_64
87
    int lma;    /* long mode active */
88
    int code64; /* 64 bit code segment */
89
    int rex_x, rex_b;
90
#endif
91
    int ss32;   /* 32 bit stack segment */
92
    int cc_op;  /* current CC operation */
93
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
94
    int f_st;   /* currently unused */
95
    int vm86;   /* vm86 mode */
96
    int cpl;
97
    int iopl;
98
    int tf;     /* TF cpu flag */
99
    int singlestep_enabled; /* "hardware" single step enabled */
100
    int jmp_opt; /* use direct block chaining for direct jumps */
101
    int mem_index; /* select memory access functions */
102
    uint64_t flags; /* all execution flags */
103
    struct TranslationBlock *tb;
104
    int popl_esp_hack; /* for correct popl with esp base handling */
105
    int rip_offset; /* only used in x86_64, but left for simplicity */
106
    int cpuid_features;
107
    int cpuid_ext_features;
108
    int cpuid_ext2_features;
109
    int cpuid_ext3_features;
110
} DisasContext;
111

    
112
static void gen_eob(DisasContext *s);
113
static void gen_jmp(DisasContext *s, target_ulong eip);
114
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
115

    
116
/* i386 arith/logic operations */
117
enum {
118
    OP_ADDL,
119
    OP_ORL,
120
    OP_ADCL,
121
    OP_SBBL,
122
    OP_ANDL,
123
    OP_SUBL,
124
    OP_XORL,
125
    OP_CMPL,
126
};
127

    
128
/* i386 shift ops */
129
enum {
130
    OP_ROL,
131
    OP_ROR,
132
    OP_RCL,
133
    OP_RCR,
134
    OP_SHL,
135
    OP_SHR,
136
    OP_SHL1, /* undocumented */
137
    OP_SAR = 7,
138
};
139

    
140
enum {
141
    JCC_O,
142
    JCC_B,
143
    JCC_Z,
144
    JCC_BE,
145
    JCC_S,
146
    JCC_P,
147
    JCC_L,
148
    JCC_LE,
149
};
150

    
151
/* operand size */
152
enum {
153
    OT_BYTE = 0,
154
    OT_WORD,
155
    OT_LONG,
156
    OT_QUAD,
157
};
158

    
159
enum {
160
    /* I386 int registers */
161
    OR_EAX,   /* MUST be even numbered */
162
    OR_ECX,
163
    OR_EDX,
164
    OR_EBX,
165
    OR_ESP,
166
    OR_EBP,
167
    OR_ESI,
168
    OR_EDI,
169

    
170
    OR_TMP0 = 16,    /* temporary operand register */
171
    OR_TMP1,
172
    OR_A0, /* temporary register used when doing address evaluation */
173
};
174

    
175
/* The gen_op_* helpers below emit TCG ops that manipulate the translator's
   global temporaries: cpu_T[0]/cpu_T[1] hold instruction operands and
   cpu_A0 holds the effective address being computed. */

/* T0 = 0 */
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

/* T0 = val (sign-extended 32-bit immediate) */
static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

/* T0 = val (zero-extended 32-bit immediate) */
static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

/* T1 = val (sign-extended 32-bit immediate) */
static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

/* T1 = val (zero-extended 32-bit immediate) */
static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

/* A0 = val (32-bit address immediate) */
static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
/* A0 = val (full 64-bit address immediate, long mode only) */
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

/* T0 = val (full target_ulong width) */
static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

/* T1 = val (full target_ulong width) */
static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

/* T0 &= 0xffff (truncate to 16-bit operand) */
static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

/* T0 &= val */
static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

/* T0 = T1 */
static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

/* A0 &= 0xffff (wrap address for 16-bit addressing) */
static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
241

    
242
#ifdef TARGET_X86_64
243

    
244
#define NB_OP_SIZES 4
245

    
246
#else /* !TARGET_X86_64 */
247

    
248
#define NB_OP_SIZES 3
249

    
250
#endif /* !TARGET_X86_64 */
251

    
252
#if defined(WORDS_BIGENDIAN)
253
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
254
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
255
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
256
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
257
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
258
#else
259
#define REG_B_OFFSET 0
260
#define REG_H_OFFSET 1
261
#define REG_W_OFFSET 0
262
#define REG_L_OFFSET 0
263
#define REG_LH_OFFSET 4
264
#endif
265

    
266
/* Store t0 into guest register 'reg' with operand size 'ot', honouring
   x86 partial-register semantics (byte/word writes leave the upper bits
   of the register untouched; 32-bit writes in long mode zero the high
   32 bits, as the architecture requires). */
static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
{
    switch(ot) {
    case OT_BYTE:
        /* Without REX, regs 4..7 name AH/CH/DH/BH (high byte of reg-4).
           With a REX prefix (x86_64_hregs) they name SPL/BPL/SIL/DIL
           instead, i.e. the low byte. */
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
298

    
299
/* Store cpu_T[0] into guest register 'reg' with operand size 'ot'. */
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_v(ot, reg, cpu_T[0]);
}

/* Store cpu_T[1] into guest register 'reg' with operand size 'ot'. */
static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_v(ot, reg, cpu_T[1]);
}
308

    
309
/* Store cpu_A0 into guest register 'reg'.  'size' is an address-size
   code (0 = 16 bit, 1 = 32 bit, 2 = 64 bit), not an OT_* value. */
static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
334

    
335
/* Load guest register 'reg' into t0 with operand size 'ot'.  For any
   size other than a legacy high-byte register the whole target_ulong
   slot is loaded; callers are expected to truncate/extend as needed. */
static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
{
    switch(ot) {
    case OT_BYTE:
        /* Same AH/CH/DH/BH vs SPL/BPL/SIL/DIL selection as in
           gen_op_mov_reg_v(). */
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}

/* Load guest register 'reg' into cpu_T[t_index] with operand size 'ot'. */
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
}
356

    
357
/* A0 = low 32 bits of guest register 'reg' (zero-extended). */
static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}

/* A0 += val, wrapping to 32 bits (32-bit address arithmetic). */
static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    /* on a 64-bit build the mask keeps the 32-bit wraparound semantics */
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
/* A0 += val with full 64-bit arithmetic (long mode). */
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

/* A0 += val using the address width of the current code segment. */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
386

    
387
/* T0 += T1 */
static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

/* env->eip = T0 (indirect jump target) */
static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}
396

    
397
/* Guest register 'reg' += val.  'size' is an address-size code
   (0 = 16 bit, 1 = 32 bit, 2 = 64 bit); only the sized portion of the
   register is updated, preserving the upper bits for 16-bit mode. */
static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        /* 16-bit store leaves the high part of the register intact */
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
        /* 32-bit result: zero the upper half, matching hardware */
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}
422

    
423
/* Guest register 'reg' += T0, with the same size semantics as
   gen_op_add_reg_im() (0 = 16 bit, 1 = 32 bit, 2 = 64 bit).  Used for
   the string-op index updates, where T0 holds the direction delta. */
static inline void gen_op_add_reg_T0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}
448

    
449
/* Record the lazy condition-code operation in the cpu_cc_op global. */
static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}

/* A0 += (reg << shift), wrapping to 32 bits (scaled-index addressing). */
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    /* keep 32-bit address wraparound on a 64-bit build */
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
464

    
465
/* A0 = low 32 bits of segment 'reg' base. */
static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

/* A0 += segment 'reg' base, wrapping to 32 bits. */
static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
/* A0 = full 64-bit segment 'reg' base (long mode). */
static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

/* A0 += full 64-bit segment 'reg' base (long mode). */
static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

/* A0 = full 64-bit guest register 'reg' (long mode). */
static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

/* A0 += (reg << shift) with full 64-bit arithmetic (long mode). */
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif
504

    
505
/* Sign-extending load T0 = *(A0).  'idx' packs the operand size in its
   low 2 bits (0 = byte, 1 = word, 2 = long) and the memory index
   (mem_index + 1) in the upper bits; callers pass ot + s->mem_index. */
static inline void gen_op_lds_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 2:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
521

    
522
/* Zero-extending guest memory load t0 = *(a0).  'idx' packs the
   operand size (low 2 bits: 0 = byte, 1 = word, 2 = long, 3 = quad)
   and the memory index (upper bits, see gen_op_lds_T0_A0). */
static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(t0, a0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(t0, a0, mem_index);
        break;
    }
}
541

    
542
/* XXX: always use ldu or lds */
543
static inline void gen_op_ld_T0_A0(int idx)
544
{
545
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
546
}
547

    
548
static inline void gen_op_ldu_T0_A0(int idx)
549
{
550
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
551
}
552

    
553
static inline void gen_op_ld_T1_A0(int idx)
554
{
555
    gen_op_ld_v(idx, cpu_T[1], cpu_A0);
556
}
557

    
558
/* Guest memory store *(a0) = t0.  'idx' packs size and memory index
   exactly as in gen_op_ld_v(). */
static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(t0, a0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(t0, a0, mem_index);
        break;
    }
}

/* *(A0) = T0 */
static inline void gen_op_st_T0_A0(int idx)
{
    gen_op_st_v(idx, cpu_T[0], cpu_A0);
}

/* *(A0) = T1 */
static inline void gen_op_st_T1_A0(int idx)
{
    gen_op_st_v(idx, cpu_T[1], cpu_A0);
}
587

    
588
/* env->eip = pc (immediate jump target / state sync before helpers). */
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
593

    
594
/* Compute the source address of a string instruction into A0:
   segment base (override or DS) plus (R)ESI, using the current
   address size.  Honours segment overrides in all modes. */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64-bit address: segment base is only added for an explicit
           override (DS base is architecturally zero in long mode) */
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}
628

    
629
/* Compute the destination address of a string instruction into A0:
   ES base plus (R)EDI.  The destination segment is always ES and
   cannot be overridden, per the architecture. */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64-bit address: ES base is architecturally zero */
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
649

    
650
static inline void gen_op_movl_T0_Dshift(int ot) 
651
{
652
    tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
653
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
654
};
655

    
656
/* Zero-extend 'reg' in place from the operand size 'ot' to the full
   target_ulong width.  OT_QUAD is already full width, so no op is
   emitted for it. */
static void gen_extu(int ot, TCGv reg)
{
    if (ot == OT_BYTE) {
        tcg_gen_ext8u_tl(reg, reg);
    } else if (ot == OT_WORD) {
        tcg_gen_ext16u_tl(reg, reg);
    } else if (ot == OT_LONG) {
        tcg_gen_ext32u_tl(reg, reg);
    }
}
672

    
673
/* Sign-extend 'reg' in place from the operand size 'ot' to the full
   target_ulong width.  OT_QUAD is already full width, so no op is
   emitted for it. */
static void gen_exts(int ot, TCGv reg)
{
    if (ot == OT_BYTE) {
        tcg_gen_ext8s_tl(reg, reg);
    } else if (ot == OT_WORD) {
        tcg_gen_ext16s_tl(reg, reg);
    } else if (ot == OT_LONG) {
        tcg_gen_ext32s_tl(reg, reg);
    }
}
689

    
690
/* Branch to label1 if (R)ECX != 0 at the given address size
   ('size' + 1 is the OT_* width used for the comparison). */
static inline void gen_op_jnz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
}

/* Branch to label1 if (R)ECX == 0 at the given address size. */
static inline void gen_op_jz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
}
703

    
704
/* Helper dispatch tables indexed by operand size (0 = byte, 1 = word,
   2 = long) for IN/OUT emulation and I/O-permission checks. */
static void *helper_in_func[3] = {
    helper_inb,
    helper_inw,
    helper_inl,
};

static void *helper_out_func[3] = {
    helper_outb,
    helper_outw,
    helper_outl,
};

static void *gen_check_io_func[3] = {
    helper_check_iob,
    helper_check_iow,
    helper_check_iol,
};
721

    
722
/* Emit the privilege / intercept checks required before an I/O
   instruction.  The port number is expected in T0.  When the check can
   fault (protected mode with CPL > IOPL, or vm86, or SVM intercepts
   enabled) the cc state and eip are synced first so the exception
   unwinds correctly; 'state_saved' avoids doing that twice. */
static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        /* TSS I/O permission bitmap check, may raise #GP */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
                           cpu_tmp2_i32);
    }
    if(s->flags & HF_SVMI_MASK) {
        /* SVM IOIO intercept check */
        if (!state_saved) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(cur_eip);
            state_saved = 1;
        }
        svm_flags |= (1 << (4 + ot));   /* encode access size in flags */
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           cpu_tmp2_i32,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));
    }
}
754

    
755
/* Emit one MOVS iteration: load from DS:(R)ESI, store to ES:(R)EDI,
   then advance both index registers by the DF-scaled delta. */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
765

    
766
/* Flush the statically-known cc_op into the cpu_cc_op global and mark
   the translator's view as dynamic (unknown at translation time). */
static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}
773

    
774
/* Lazy-flag setup: result only (cc_src unused), e.g. INC/DEC/logic. */
static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

/* Lazy-flag setup: second operand in cc_src, result in cc_dst. */
static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

/* Lazy-flag setup for CMP: cc_src = T1, cc_dst = T0 - T1. */
static inline void gen_op_cmpl_T0_T1_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

/* Lazy-flag setup for TEST: cc_dst = T0 & T1, cc_src unused. */
static inline void gen_op_testl_T0_T1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

/* Lazy-flag setup for NEG: cc_src = -T0 (original operand), cc_dst = result. */
static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
803

    
804
/* compute eflags.C to reg */
/* Emits an indirect call through cc_table[cc_op].compute_c: the cc_op
   value is scaled by the CCTable stride (8 bytes with 32-bit host
   pointers, 16 with 64-bit), the function pointer is loaded, and the
   i32 result is zero-extended into 'reg'. */
static void gen_compute_eflags_c(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE, 
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64, 
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE, 
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
825

    
826
/* compute all eflags into 'reg' */
/* Same indirect-call scheme as gen_compute_eflags_c(), but through
   cc_table[cc_op].compute_all, yielding the full EFLAGS value. */
static void gen_compute_eflags(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE, 
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64, 
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE, 
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
847

    
848
/* Slow-path SETcc: compute T0 = 0/1 for condition 'jcc_op' by fully
   materializing EFLAGS and extracting the relevant bit(s).
   Bit positions: CF=0, PF=2, ZF=6, SF=7, OF=11. */
static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    switch(jcc_op) {
    case JCC_O:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_B:
        /* condition is just CF */
        gen_compute_eflags_c(cpu_T[0]);
        break;
    case JCC_Z:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_BE:
        /* CF | ZF */
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_S:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_P:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_L:
        /* SF != OF */
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    default:
    case JCC_LE:
        /* ZF | (SF != OF) */
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    }
}
901

    
902
/* return true if setcc_slow is not needed (WARNING: must be kept in
903
   sync with gen_jcc1) */
904
static int is_fast_jcc_case(DisasContext *s, int b)
905
{
906
    int jcc_op;
907
    jcc_op = (b >> 1) & 7;
908
    switch(s->cc_op) {
909
        /* we optimize the cmp/jcc case */
910
    case CC_OP_SUBB:
911
    case CC_OP_SUBW:
912
    case CC_OP_SUBL:
913
    case CC_OP_SUBQ:
914
        if (jcc_op == JCC_O || jcc_op == JCC_P)
915
            goto slow_jcc;
916
        break;
917

    
918
        /* some jumps are easy to compute */
919
    case CC_OP_ADDB:
920
    case CC_OP_ADDW:
921
    case CC_OP_ADDL:
922
    case CC_OP_ADDQ:
923

    
924
    case CC_OP_LOGICB:
925
    case CC_OP_LOGICW:
926
    case CC_OP_LOGICL:
927
    case CC_OP_LOGICQ:
928

    
929
    case CC_OP_INCB:
930
    case CC_OP_INCW:
931
    case CC_OP_INCL:
932
    case CC_OP_INCQ:
933

    
934
    case CC_OP_DECB:
935
    case CC_OP_DECW:
936
    case CC_OP_DECL:
937
    case CC_OP_DECQ:
938

    
939
    case CC_OP_SHLB:
940
    case CC_OP_SHLW:
941
    case CC_OP_SHLL:
942
    case CC_OP_SHLQ:
943
        if (jcc_op != JCC_Z && jcc_op != JCC_S)
944
            goto slow_jcc;
945
        break;
946
    default:
947
    slow_jcc:
948
        return 0;
949
    }
950
    return 1;
951
}
952

    
953
/* generate a conditional jump to label 'l1' according to jump opcode
954
   value 'b'. In the fast case, T0 is guaranted not to be used. */
955
static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
956
{
957
    int inv, jcc_op, size, cond;
958
    TCGv t0;
959

    
960
    inv = b & 1;
961
    jcc_op = (b >> 1) & 7;
962

    
963
    switch(cc_op) {
964
        /* we optimize the cmp/jcc case */
965
    case CC_OP_SUBB:
966
    case CC_OP_SUBW:
967
    case CC_OP_SUBL:
968
    case CC_OP_SUBQ:
969
        
970
        size = cc_op - CC_OP_SUBB;
971
        switch(jcc_op) {
972
        case JCC_Z:
973
        fast_jcc_z:
974
            switch(size) {
975
            case 0:
976
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
977
                t0 = cpu_tmp0;
978
                break;
979
            case 1:
980
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
981
                t0 = cpu_tmp0;
982
                break;
983
#ifdef TARGET_X86_64
984
            case 2:
985
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
986
                t0 = cpu_tmp0;
987
                break;
988
#endif
989
            default:
990
                t0 = cpu_cc_dst;
991
                break;
992
            }
993
            tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
994
            break;
995
        case JCC_S:
996
        fast_jcc_s:
997
            switch(size) {
998
            case 0:
999
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
1000
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0, 
1001
                                   0, l1);
1002
                break;
1003
            case 1:
1004
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
1005
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0, 
1006
                                   0, l1);
1007
                break;
1008
#ifdef TARGET_X86_64
1009
            case 2:
1010
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
1011
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0, 
1012
                                   0, l1);
1013
                break;
1014
#endif
1015
            default:
1016
                tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst, 
1017
                                   0, l1);
1018
                break;
1019
            }
1020
            break;
1021
            
1022
        case JCC_B:
1023
            cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
1024
            goto fast_jcc_b;
1025
        case JCC_BE:
1026
            cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
1027
        fast_jcc_b:
1028
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1029
            switch(size) {
1030
            case 0:
1031
                t0 = cpu_tmp0;
1032
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
1033
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
1034
                break;
1035
            case 1:
1036
                t0 = cpu_tmp0;
1037
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
1038
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
1039
                break;
1040
#ifdef TARGET_X86_64
1041
            case 2:
1042
                t0 = cpu_tmp0;
1043
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
1044
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
1045
                break;
1046
#endif
1047
            default:
1048
                t0 = cpu_cc_src;
1049
                break;
1050
            }
1051
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1052
            break;
1053
            
1054
        case JCC_L:
1055
            cond = inv ? TCG_COND_GE : TCG_COND_LT;
1056
            goto fast_jcc_l;
1057
        case JCC_LE:
1058
            cond = inv ? TCG_COND_GT : TCG_COND_LE;
1059
        fast_jcc_l:
1060
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1061
            switch(size) {
1062
            case 0:
1063
                t0 = cpu_tmp0;
1064
                tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
1065
                tcg_gen_ext8s_tl(t0, cpu_cc_src);
1066
                break;
1067
            case 1:
1068
                t0 = cpu_tmp0;
1069
                tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
1070
                tcg_gen_ext16s_tl(t0, cpu_cc_src);
1071
                break;
1072
#ifdef TARGET_X86_64
1073
            case 2:
1074
                t0 = cpu_tmp0;
1075
                tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
1076
                tcg_gen_ext32s_tl(t0, cpu_cc_src);
1077
                break;
1078
#endif
1079
            default:
1080
                t0 = cpu_cc_src;
1081
                break;
1082
            }
1083
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1084
            break;
1085
            
1086
        default:
1087
            goto slow_jcc;
1088
        }
1089
        break;
1090
        
1091
        /* some jumps are easy to compute */
1092
    case CC_OP_ADDB:
1093
    case CC_OP_ADDW:
1094
    case CC_OP_ADDL:
1095
    case CC_OP_ADDQ:
1096
        
1097
    case CC_OP_ADCB:
1098
    case CC_OP_ADCW:
1099
    case CC_OP_ADCL:
1100
    case CC_OP_ADCQ:
1101
        
1102
    case CC_OP_SBBB:
1103
    case CC_OP_SBBW:
1104
    case CC_OP_SBBL:
1105
    case CC_OP_SBBQ:
1106
        
1107
    case CC_OP_LOGICB:
1108
    case CC_OP_LOGICW:
1109
    case CC_OP_LOGICL:
1110
    case CC_OP_LOGICQ:
1111
        
1112
    case CC_OP_INCB:
1113
    case CC_OP_INCW:
1114
    case CC_OP_INCL:
1115
    case CC_OP_INCQ:
1116
        
1117
    case CC_OP_DECB:
1118
    case CC_OP_DECW:
1119
    case CC_OP_DECL:
1120
    case CC_OP_DECQ:
1121
        
1122
    case CC_OP_SHLB:
1123
    case CC_OP_SHLW:
1124
    case CC_OP_SHLL:
1125
    case CC_OP_SHLQ:
1126
        
1127
    case CC_OP_SARB:
1128
    case CC_OP_SARW:
1129
    case CC_OP_SARL:
1130
    case CC_OP_SARQ:
1131
        switch(jcc_op) {
1132
        case JCC_Z:
1133
            size = (cc_op - CC_OP_ADDB) & 3;
1134
            goto fast_jcc_z;
1135
        case JCC_S:
1136
            size = (cc_op - CC_OP_ADDB) & 3;
1137
            goto fast_jcc_s;
1138
        default:
1139
            goto slow_jcc;
1140
        }
1141
        break;
1142
    default:
1143
    slow_jcc:
1144
        gen_setcc_slow_T0(s, jcc_op);
1145
        tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, 
1146
                           cpu_T[0], 0, l1);
1147
        break;
1148
    }
1149
}
1150

    
1151
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
/* Emit the loop-exit test shared by all REP-prefixed string ops:
   if ECX is zero, jump out of the translated loop to next_eip.
   Returns label l2, which callers may branch to in order to leave
   the rep loop (e.g. when the Z condition of repz/repnz fails). */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    /* ECX != 0: skip the exit path and fall through into the string op */
    gen_op_jnz_ecx(s->aflag, l1);
    gen_set_label(l2);
    /* ECX == 0 (or an explicit jump to l2): continue at next_eip */
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
1165

    
1166
/* Emit code for one STOS iteration: store AL/AX/EAX at ES:[EDI],
   then advance EDI by the element size (sign of the step follows DF
   via the Dshift value). */
static inline void gen_stos(DisasContext *s, int ot)
{
    /* load full EAX; the store below truncates to the operand size */
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);           /* +size or -size depending on DF */
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
1174

    
1175
/* Emit code for one LODS iteration: load from DS:[ESI] into
   AL/AX/EAX, then advance ESI by the element size (direction per DF). */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);           /* +size or -size depending on DF */
    gen_op_add_reg_T0(s->aflag, R_ESI);
}
1183

    
1184
/* Emit code for one SCAS iteration: compare AL/AX/EAX against
   ES:[EDI] (setting the flags state via the cmp helper), then
   advance EDI by the element size. */
static inline void gen_scas(DisasContext *s, int ot)
{
    /* load full EAX; the comparison uses the operand-size portion */
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();              /* flags from T0 - T1 */
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
1193

    
1194
/* Emit code for one CMPS iteration: compare DS:[ESI] with ES:[EDI]
   (flags from the subtraction), then advance both ESI and EDI by the
   element size (direction per DF). */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();              /* flags from T0 - T1 */
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
1205

    
1206
/* Emit code for one INS iteration: read from I/O port DX and store
   the value at ES:[EDI], then advance EDI.  Bracketed by
   gen_io_start/gen_io_end when instruction counting is enabled. */
static inline void gen_ins(DisasContext *s, int ot)
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    /* port number = DX (low 16 bits only) */
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);   /* real store of the port value */
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
    if (use_icount)
        gen_io_end();
}
1225

    
1226
/* Emit code for one OUTS iteration: load from DS:[ESI] and write the
   value to I/O port DX, then advance ESI.  Bracketed by
   gen_io_start/gen_io_end when instruction counting is enabled. */
static inline void gen_outs(DisasContext *s, int ot)
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    /* port number = DX (low 16 bits only) */
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    if (use_icount)
        gen_io_end();
}
1244

    
1245
/* same method as Valgrind : we generate jumps to current or next
   instruction */
/* Defines gen_repz_<op>(): wraps one gen_<op>() iteration with the
   REP protocol — exit when ECX == 0, decrement ECX, then loop back to
   cur_eip (or exit via l2 when single-stepping, see comment below). */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
1262

    
1263
/* Defines gen_repz_<op>() for the flag-testing string ops (SCAS/CMPS):
   like GEN_REPZ but additionally exits the loop on the ZF condition.
   'nz' selects REPNZ (exit when ZF set) vs REPZ (exit when ZF clear);
   (JCC_Z << 1) | (nz ^ 1) builds the jcc code with its inversion bit. */
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2);                \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
1280

    
1281
/* Instantiate the REP-prefixed variants of the string operations.
   MOVS/STOS/LODS/INS/OUTS only test ECX (GEN_REPZ); SCAS/CMPS also
   test ZF for repz/repnz termination (GEN_REPZ2). */
GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
1288

    
1289
/* FPU arithmetic helpers operating as ST0 <op>= FT0, indexed by the
   3-bit /reg field of the x87 opcode (add, mul, com, comp, sub, subr,
   div, divr).  Slots 2 and 3 both use the fcom helper; the extra pop
   of the "comp" form is presumably emitted by the caller — verify at
   the use site. */
static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,
};
1299

    
1300
/* NOTE the exception in "r" op ordering */
/* FPU arithmetic helpers operating as ST(n) <op>= ST0.  Same index
   layout as helper_fp_arith_ST0_FT0, but the sub/subr and div/divr
   slots are swapped relative to that table (the "reversed" encoding
   of the STN-destination forms), and the compare slots are unused. */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    NULL,
    NULL,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
};
1311

    
1312
/* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit code for a two-operand ALU op (op) of size ot.  The first
   operand/destination is register d or memory at A0; the second
   operand is expected in T1.  Updates the lazy condition-code state:
   ADC/SBB set cc_op dynamically at runtime (it depends on the carry
   input), the others set s1->cc_op statically at translation time. */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    /* load first operand into T0 */
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        /* materialize current flags so the carry can be read */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);   /* cpu_tmp4 = carry in (0/1) */
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        /* runtime cc_op = ADDB+ot or ADCB+ot depending on carry:
           the ADC* group sits 4 entries (carry << 2) after ADD* */
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_SBBL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);   /* cpu_tmp4 = borrow in (0/1) */
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        /* runtime cc_op = SUBB+ot or SBBB+ot depending on borrow */
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();              /* cc_src = T1, cc_dst = T0 */
        s1->cc_op = CC_OP_ADDB + ot;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    default:                              /* unknown ops behave as AND */
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();              /* cc_dst = T0 only */
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_CMPL:
        /* compare only: flags are set but no result is written back */
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    }
}
1407

    
1408
/* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit code for INC (c > 0) or DEC (c <= 0) of size ot on register d
   or memory at A0.  INC/DEC preserve CF, so the incoming carry is
   saved into cc_src before the lazy-flags state is switched to the
   INC/DEC cc_op group. */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    /* flags must be materialized so the preserved CF can be extracted */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    gen_compute_eflags_c(cpu_cc_src);     /* keep CF from before the op */
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
1431

    
1432
/* Emit code for SHL/SHR/SAR of size ot with a variable count in T1.
   op1 selects the destination (register, or memory at A0 when
   OR_TMP0); is_right/is_arith select the shift direction and
   sign-extension.  Flags are only updated when the masked count is
   non-zero, which is decided at runtime with a branch, so cc_op
   becomes dynamic. */
static void gen_shift_rm_T1(DisasContext *s, int ot, int op1, 
                            int is_right, int is_arith)
{
    target_ulong mask;
    int shift_label;
    TCGv t0, t1;

    /* x86 masks the count to 5 bits (6 for 64-bit operands) */
    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    /* count - 1: used to capture the last bit shifted out (CF) */
    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);

    if (is_right) {
        if (is_arith) {
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);   /* T3 = partial, for flags */
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);
        
    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* XXX: inefficient */
    /* local temps: their values must survive the brcond below */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);

    tcg_gen_mov_tl(t0, cpu_T[0]);
    tcg_gen_mov_tl(t1, cpu_T3);

    shift_label = gen_new_label();
    /* count == 0: skip the flags update entirely */
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);

    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
        
    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
1502

    
1503
/* Emit code for SHL/SHR/SAR of size ot with an immediate count op2.
   Unlike the variable-count version, the zero-count case is known at
   translation time, so the flag update (and the shift itself) is
   simply omitted when (op2 & mask) == 0 and cc_op stays static. */
static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
                            int is_right, int is_arith)
{
    int mask;
    
    /* x86 masks the count to 5 bits (6 for 64-bit operands) */
    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    op2 &= mask;
    if (op2 != 0) {
        if (is_right) {
            if (is_arith) {
                gen_exts(ot, cpu_T[0]);
                /* shift by op2-1 first to capture CF in cpu_tmp4 */
                tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
            } else {
                gen_extu(ot, cpu_T[0]);
                tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
            }
        } else {
            tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
            tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
        }
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);
        
    /* update eflags if non zero shift */
    if (op2 != 0) {
        tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        if (is_right)
            s->cc_op = CC_OP_SARB + ot;
        else
            s->cc_op = CC_OP_SHLB + ot;
    }
}
1553

    
1554
/* Signed-count shift helper: shift arg1 left by arg2 when arg2 is
   non-negative, otherwise shift it right by -arg2. */
static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
{
    if (arg2 < 0)
        tcg_gen_shri_tl(ret, arg1, -arg2);
    else
        tcg_gen_shli_tl(ret, arg1, arg2);
}
1561

    
1562
/* XXX: add faster immediate case */
/* Emit code for ROL/ROR of size ot with a variable count in T1.
   op1 selects the destination (register, or memory at A0 when
   OR_TMP0).  The rotate is built from two opposite shifts OR-ed
   together; flags (CF and OF only) are recomputed at runtime and
   merged into EFLAGS, but only when the masked count is non-zero. */
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1, 
                          int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;
    TCGv t0, t1, t2, a0;

    /* XXX: inefficient, but we must use local temps */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
    t2 = tcg_temp_local_new(TCG_TYPE_TL);
    a0 = tcg_temp_local_new(TCG_TYPE_TL);

    /* x86 masks the count to 5 bits (6 for 64-bit operands) */
    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);       /* keep A0 across the helper ops */
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_mov_tl(t1, cpu_T[1]);

    tcg_gen_andi_tl(t1, t1, mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);
    
    /* effective rotate amount: count mod data_bits for 8/16-bit ops */
    if (ot <= OT_WORD)
        tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
    else
        tcg_gen_mov_tl(cpu_tmp0, t1);
    
    gen_extu(ot, t0);
    tcg_gen_mov_tl(t2, t0);               /* t2 = original value, for OF */

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
    /* rotate = (x >> n) | (x << (data_bits - n)), or mirrored for ROL */
    if (is_right) {
        tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shl_tl(t0, t0, cpu_tmp0);
    } else {
        tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shr_tl(t0, t0, cpu_tmp0);
    }
    tcg_gen_or_tl(t0, t0, cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }
    
    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* count == 0: flags are left unchanged */
    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);

    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    /* OF = old MSB xor new MSB, positioned at bit 11 (CC_O) */
    tcg_gen_xor_tl(cpu_tmp0, t2, t0);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    /* CF = MSB of result for ROR, LSB of result for ROL */
    if (is_right) {
        tcg_gen_shri_tl(t0, t0, data_bits - 1);
    }
    tcg_gen_andi_tl(t0, t0, CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
    
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
        
    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(a0);
}
1658

    
1659
/* Rotate-through-carry helpers, indexed as ot + (is_right * 4) by
   gen_rotc_rm_T1: RCL b/w/l/q in slots 0-3, RCR b/w/l/q in 4-7.
   The 64-bit entries are NULL on 32-bit targets (X86_64_ONLY). */
static void *helper_rotc[8] = {
    helper_rclb,
    helper_rclw,
    helper_rcll,
    X86_64_ONLY(helper_rclq),
    helper_rcrb,
    helper_rcrw,
    helper_rcrl,
    X86_64_ONLY(helper_rcrq),
};
1669

    
1670
/* XXX: add faster immediate = 1 case */
/* Emit code for RCL/RCR of size ot with the count in T1.  The actual
   rotate-through-carry is done by a helper from the helper_rotc table;
   the helper communicates through cpu_cc_tmp: it is left at -1 when
   the effective count was zero (flags unchanged), otherwise it holds
   the new EFLAGS value to install. */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1, 
                           int is_right)
{
    int label1;

    /* helper reads the current flags, so they must be materialized */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);
    
    tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
                       cpu_T[0], cpu_T[0], cpu_T[1]);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    /* cc_tmp == -1 means the helper performed a zero-count rotate */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);

    tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
        
    gen_set_label(label1);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1704

    
1705
/* XXX: add faster immediate case */
/* Emit code for SHLD/SHRD of size ot: shift the destination (register
   op1, or memory at A0 when OR_TMP0) by the count in T3, filling the
   vacated bits from T1.  For 16-bit operands the two values are packed
   into one 32-bit quantity so counts up to 31 follow the Intel
   behaviour.  Flags are updated at runtime only when the masked count
   is non-zero, so cc_op becomes dynamic. */
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1, 
                                int is_right)
{
    int label1, label2, data_bits;
    target_ulong mask;
    TCGv t0, t1, t2, a0;

    /* local temps: values must survive the branches below */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
    t2 = tcg_temp_local_new(TCG_TYPE_TL);
    a0 = tcg_temp_local_new(TCG_TYPE_TL);

    /* x86 masks the count to 5 bits (6 for 64-bit operands) */
    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);

    tcg_gen_mov_tl(t1, cpu_T[1]);         /* t1 = fill operand */
    tcg_gen_mov_tl(t2, cpu_T3);           /* t2 = masked count */

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
    
    /* count - 1, used to capture the last bit shifted out (CF) */
    tcg_gen_addi_tl(cpu_tmp5, t2, -1);
    if (ot == OT_WORD) {
        /* Note: we implement the Intel behaviour for shift count > 16 */
        if (is_right) {
            /* pack as t1:t0 in 32 bits, then shift right */
            tcg_gen_andi_tl(t0, t0, 0xffff);
            tcg_gen_shli_tl(cpu_tmp0, t1, 16);
            tcg_gen_or_tl(t0, t0, cpu_tmp0);
            tcg_gen_ext32u_tl(t0, t0);

            tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
            
            /* only needed if count > 16, but a test would complicate */
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
            tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);

            tcg_gen_shr_tl(t0, t0, t2);

            tcg_gen_or_tl(t0, t0, cpu_tmp0);
        } else {
            /* XXX: not optimal */
            /* pack as t1 = t1:t0 in 32 bits, then shift left */
            tcg_gen_andi_tl(t0, t0, 0xffff);
            tcg_gen_shli_tl(t1, t1, 16);
            tcg_gen_or_tl(t1, t1, t0);
            tcg_gen_ext32u_tl(t1, t1);
            
            tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
            tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
            tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
            tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);

            tcg_gen_shl_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
            tcg_gen_shr_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
        }
    } else {
        data_bits = 8 << ot;
        if (is_right) {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(t0, t0);

            tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);   /* for CF */

            /* result = (t0 >> count) | (t1 << (data_bits - count)) */
            tcg_gen_shr_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
            tcg_gen_shl_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
            
        } else {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(t1, t1);

            tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);   /* for CF */
            
            /* result = (t0 << count) | (t1 >> (data_bits - count)) */
            tcg_gen_shl_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
            tcg_gen_shr_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
        }
    }
    tcg_gen_mov_tl(t1, cpu_tmp4);         /* t1 = value for cc_src */

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }
    
    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* count == 0: flags are left unchanged */
    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);

    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right) {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    } else {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
    }
    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(a0);
}
1833

    
1834
/* Dispatch a shift/rotate op of size ot on destination d, with the
   count taken from register s (or already in T1 when s == OR_TMP1). */
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);     /* count -> T1 */
    switch(op) {
    case OP_ROL:
        gen_rot_rm_T1(s1, ot, d, 0);
        break;
    case OP_ROR:
        gen_rot_rm_T1(s1, ot, d, 1);
        break;
    case OP_SHL:
    case OP_SHL1:
        gen_shift_rm_T1(s1, ot, d, 0, 0);
        break;
    case OP_SHR:
        gen_shift_rm_T1(s1, ot, d, 1, 0);
        break;
    case OP_SAR:
        gen_shift_rm_T1(s1, ot, d, 1, 1);
        break;
    case OP_RCL:
        gen_rotc_rm_T1(s1, ot, d, 0);
        break;
    case OP_RCR:
        gen_rotc_rm_T1(s1, ot, d, 1);
        break;
    }
}
1863

    
1864
/* Dispatch a shift/rotate op of size ot on destination d with an
   immediate count c.  SHL/SHR/SAR use the optimized immediate path;
   rotates fall back to the variable-count path via T1. */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    switch(op) {
    case OP_SHL:
    case OP_SHL1:
        gen_shift_rm_im(s1, ot, d, c, 0, 0);
        break;
    case OP_SHR:
        gen_shift_rm_im(s1, ot, d, c, 1, 0);
        break;
    case OP_SAR:
        gen_shift_rm_im(s1, ot, d, c, 1, 1);
        break;
    default:
        /* currently not optimized */
        gen_op_movl_T1_im(c);
        gen_shift(s1, op, ot, d, OR_TMP1);
        break;
    }
}
1884

    
1885
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1886
{
1887
    target_long disp;
1888
    int havesib;
1889
    int base;
1890
    int index;
1891
    int scale;
1892
    int opreg;
1893
    int mod, rm, code, override, must_add_seg;
1894

    
1895
    override = s->override;
1896
    must_add_seg = s->addseg;
1897
    if (override >= 0)
1898
        must_add_seg = 1;
1899
    mod = (modrm >> 6) & 3;
1900
    rm = modrm & 7;
1901

    
1902
    if (s->aflag) {
1903

    
1904
        havesib = 0;
1905
        base = rm;
1906
        index = 0;
1907
        scale = 0;
1908

    
1909
        if (base == 4) {
1910
            havesib = 1;
1911
            code = ldub_code(s->pc++);
1912
            scale = (code >> 6) & 3;
1913
            index = ((code >> 3) & 7) | REX_X(s);
1914
            base = (code & 7);
1915
        }
1916
        base |= REX_B(s);
1917

    
1918
        switch (mod) {
1919
        case 0:
1920
            if ((base & 7) == 5) {
1921
                base = -1;
1922
                disp = (int32_t)ldl_code(s->pc);
1923
                s->pc += 4;
1924
                if (CODE64(s) && !havesib) {
1925
                    disp += s->pc + s->rip_offset;
1926
                }
1927
            } else {
1928
                disp = 0;
1929
            }
1930
            break;
1931
        case 1:
1932
            disp = (int8_t)ldub_code(s->pc++);
1933
            break;
1934
        default:
1935
        case 2:
1936
            disp = ldl_code(s->pc);
1937
            s->pc += 4;
1938
            break;
1939
        }
1940

    
1941
        if (base >= 0) {
1942
            /* for correct popl handling with esp */
1943
            if (base == 4 && s->popl_esp_hack)
1944
                disp += s->popl_esp_hack;
1945
#ifdef TARGET_X86_64
1946
            if (s->aflag == 2) {
1947
                gen_op_movq_A0_reg(base);
1948
                if (disp != 0) {
1949
                    gen_op_addq_A0_im(disp);
1950
                }
1951
            } else
1952
#endif
1953
            {
1954
                gen_op_movl_A0_reg(base);
1955
                if (disp != 0)
1956
                    gen_op_addl_A0_im(disp);
1957
            }
1958
        } else {
1959
#ifdef TARGET_X86_64
1960
            if (s->aflag == 2) {
1961
                gen_op_movq_A0_im(disp);
1962
            } else
1963
#endif
1964
            {
1965
                gen_op_movl_A0_im(disp);
1966
            }
1967
        }
1968
        /* XXX: index == 4 is always invalid */
1969
        if (havesib && (index != 4 || scale != 0)) {
1970
#ifdef TARGET_X86_64
1971
            if (s->aflag == 2) {
1972
                gen_op_addq_A0_reg_sN(scale, index);
1973
            } else
1974
#endif
1975
            {
1976
                gen_op_addl_A0_reg_sN(scale, index);
1977
            }
1978
        }
1979
        if (must_add_seg) {
1980
            if (override < 0) {
1981
                if (base == R_EBP || base == R_ESP)
1982
                    override = R_SS;
1983
                else
1984
                    override = R_DS;
1985
            }
1986
#ifdef TARGET_X86_64
1987
            if (s->aflag == 2) {
1988
                gen_op_addq_A0_seg(override);
1989
            } else
1990
#endif
1991
            {
1992
                gen_op_addl_A0_seg(override);
1993
            }
1994
        }
1995
    } else {
1996
        switch (mod) {
1997
        case 0:
1998
            if (rm == 6) {
1999
                disp = lduw_code(s->pc);
2000
                s->pc += 2;
2001
                gen_op_movl_A0_im(disp);
2002
                rm = 0; /* avoid SS override */
2003
                goto no_rm;
2004
            } else {
2005
                disp = 0;
2006
            }
2007
            break;
2008
        case 1:
2009
            disp = (int8_t)ldub_code(s->pc++);
2010
            break;
2011
        default:
2012
        case 2:
2013
            disp = lduw_code(s->pc);
2014
            s->pc += 2;
2015
            break;
2016
        }
2017
        switch(rm) {
2018
        case 0:
2019
            gen_op_movl_A0_reg(R_EBX);
2020
            gen_op_addl_A0_reg_sN(0, R_ESI);
2021
            break;
2022
        case 1:
2023
            gen_op_movl_A0_reg(R_EBX);
2024
            gen_op_addl_A0_reg_sN(0, R_EDI);
2025
            break;
2026
        case 2:
2027
            gen_op_movl_A0_reg(R_EBP);
2028
            gen_op_addl_A0_reg_sN(0, R_ESI);
2029
            break;
2030
        case 3:
2031
            gen_op_movl_A0_reg(R_EBP);
2032
            gen_op_addl_A0_reg_sN(0, R_EDI);
2033
            break;
2034
        case 4:
2035
            gen_op_movl_A0_reg(R_ESI);
2036
            break;
2037
        case 5:
2038
            gen_op_movl_A0_reg(R_EDI);
2039
            break;
2040
        case 6:
2041
            gen_op_movl_A0_reg(R_EBP);
2042
            break;
2043
        default:
2044
        case 7:
2045
            gen_op_movl_A0_reg(R_EBX);
2046
            break;
2047
        }
2048
        if (disp != 0)
2049
            gen_op_addl_A0_im(disp);
2050
        gen_op_andl_A0_ffff();
2051
    no_rm:
2052
        if (must_add_seg) {
2053
            if (override < 0) {
2054
                if (rm == 2 || rm == 3 || rm == 6)
2055
                    override = R_SS;
2056
                else
2057
                    override = R_DS;
2058
            }
2059
            gen_op_addl_A0_seg(override);
2060
        }
2061
    }
2062

    
2063
    opreg = OR_A0;
2064
    disp = 0;
2065
    *reg_ptr = opreg;
2066
    *offset_ptr = disp;
2067
}
2068

    
2069
/* Decode and discard a ModRM memory operand: no address computation is
   generated, s->pc is merely advanced past the optional SIB byte and
   displacement.  Used for opcodes that ignore their memory operand
   (multi-byte NOP forms). */
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
    if (mod == 3)
        return;                 /* register operand: nothing to skip */
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing forms */
        base = rm;

        if (base == 4) {
            /* base==4 means a SIB byte follows; only its base field
               matters for displacement-size decoding */
            code = ldub_code(s->pc++);
            base = (code & 7);
        }

        switch (mod) {
        case 0:
            if (base == 5) {
                s->pc += 4;     /* disp32 with no base register */
            }
            break;
        case 1:
            s->pc++;            /* disp8 */
            break;
        default:
        case 2:
            s->pc += 4;         /* disp32 */
            break;
        }
    } else {
        /* 16-bit addressing forms */
        switch (mod) {
        case 0:
            if (rm == 6) {
                s->pc += 2;     /* disp16 with no base register */
            }
            break;
        case 1:
            s->pc++;            /* disp8 */
            break;
        default:
        case 2:
            s->pc += 2;         /* disp16 */
            break;
        }
    }
}
2118

    
2119
/* used for LEA and MOV AX, mem */
2120
static void gen_add_A0_ds_seg(DisasContext *s)
2121
{
2122
    int override, must_add_seg;
2123
    must_add_seg = s->addseg;
2124
    override = R_DS;
2125
    if (s->override >= 0) {
2126
        override = s->override;
2127
        must_add_seg = 1;
2128
    } else {
2129
        override = R_DS;
2130
    }
2131
    if (must_add_seg) {
2132
#ifdef TARGET_X86_64
2133
        if (CODE64(s)) {
2134
            gen_op_addq_A0_seg(override);
2135
        } else
2136
#endif
2137
        {
2138
            gen_op_addl_A0_seg(override);
2139
        }
2140
    }
2141
}
2142

    
2143
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
   OR_TMP0 */
/* For mod==3 the transfer is register-to-register via T0; otherwise the
   effective address is computed into A0 and a memory load/store of
   operand size 'ot' is emitted.  'is_store' selects the direction. */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);        /* REX.B extends the r/m field */
    if (mod == 3) {
        /* register operand */
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    } else {
        /* memory operand: address goes through A0 */
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_st_T0_A0(ot + s->mem_index);
        } else {
            gen_op_ld_T0_A0(ot + s->mem_index);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    }
}
2174

    
2175
/* Fetch an immediate of operand type 'ot' from the instruction stream
   and advance s->pc accordingly.  Quad-sized immediates are handled by
   the caller; anything above OT_WORD falls into the 4-byte case. */
static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t ret;

    switch(ot) {
    case OT_BYTE:
        ret = ldub_code(s->pc);
        s->pc++;
        break;
    case OT_WORD:
        ret = lduw_code(s->pc);
        s->pc += 2;
        break;
    default:
    case OT_LONG:
        ret = ldl_code(s->pc);
        s->pc += 4;
        break;
    }
    return ret;
}
2196

    
2197
static inline int insn_const_size(unsigned int ot)
2198
{
2199
    if (ot <= OT_LONG)
2200
        return 1 << ot;
2201
    else
2202
        return 4;
2203
}
2204

    
2205
/* Emit a jump to 'eip', chaining directly to the next TB when the
   target lies on one of the (up to two) pages the current TB spans;
   otherwise fall back to an unchained end-of-block. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);     /* TB pointer + slot index for chaining */
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2225

    
2226
/* Emit a conditional jump on condition code 'b': taken -> 'val', not
   taken -> 'next_eip'.  When TB chaining is allowed (s->jmp_opt) both
   edges use gen_goto_tb; otherwise a plain two-label sequence ending in
   gen_eob is generated. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    int l1, l2, cc_op;

    /* remember the static cc_op for gen_jcc1; the flags are
       materialized now so cc_op becomes dynamic afterwards */
    cc_op = s->cc_op;
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
    if (s->jmp_opt) {
        l1 = gen_new_label();
        gen_jcc1(s, cc_op, b, l1);

        gen_goto_tb(s, 0, next_eip);    /* fall-through edge */

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);         /* taken edge */
        s->is_jmp = 3;
    } else {
        /* unchained variant */
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_jcc1(s, cc_op, b, l1);

        gen_jmp_im(next_eip);
        tcg_gen_br(l2);

        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
2260

    
2261
static void gen_setcc(DisasContext *s, int b)
2262
{
2263
    int inv, jcc_op, l1;
2264
    TCGv t0;
2265

    
2266
    if (is_fast_jcc_case(s, b)) {
2267
        /* nominal case: we use a jump */
2268
        /* XXX: make it faster by adding new instructions in TCG */
2269
        t0 = tcg_temp_local_new(TCG_TYPE_TL);
2270
        tcg_gen_movi_tl(t0, 0);
2271
        l1 = gen_new_label();
2272
        gen_jcc1(s, s->cc_op, b ^ 1, l1);
2273
        tcg_gen_movi_tl(t0, 1);
2274
        gen_set_label(l1);
2275
        tcg_gen_mov_tl(cpu_T[0], t0);
2276
        tcg_temp_free(t0);
2277
    } else {
2278
        /* slow case: it is more efficient not to generate a jump,
2279
           although it is questionnable whether this optimization is
2280
           worth to */
2281
        inv = b & 1;
2282
        jcc_op = (b >> 1) & 7;
2283
        gen_setcc_slow_T0(s, jcc_op);
2284
        if (inv) {
2285
            tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
2286
        }
2287
    }
2288
}
2289

    
2290
/* Load the 16-bit selector of segment 'seg_reg' into T0 (zero-extended). */
static inline void gen_op_movl_T0_seg(int seg_reg)
{
    tcg_gen_ld32u_tl(cpu_T[0], cpu_env, 
                     offsetof(CPUX86State,segs[seg_reg].selector));
}
2295

    
2296
/* Real-mode/vm86 segment load: store T0 as the selector and set the
   segment base to selector << 4 (no descriptor table lookup). */
static inline void gen_op_movl_seg_T0_vm(int seg_reg)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
    tcg_gen_st32_tl(cpu_T[0], cpu_env, 
                    offsetof(CPUX86State,segs[seg_reg].selector));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);     /* base = selector * 16 */
    tcg_gen_st_tl(cpu_T[0], cpu_env, 
                  offsetof(CPUX86State,segs[seg_reg].base));
}
2305

    
2306
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
/* In protected mode the helper performs the full descriptor load (and
   may fault, hence the cc_op/eip sync first); otherwise the simple
   vm86-style load is used. */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        gen_op_movl_seg_T0_vm(seg_reg);
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
2329

    
2330
static inline int svm_is_rep(int prefixes)
2331
{
2332
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2333
}
2334

    
2335
/* Emit an SVM intercept check for 'type' with exit-info 'param'.  The
   check is skipped entirely when the guest is not running under SVM
   (HF_SVMI_MASK clear); otherwise flags and eip are synced first since
   the helper may trigger a #VMEXIT. */
static inline void
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint32_t type, uint64_t param)
{
    /* no SVM activated; fast case */
    if (likely(!(s->flags & HF_SVMI_MASK)))
        return;
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(pc_start - s->cs_base);
    tcg_gen_helper_0_2(helper_svm_check_intercept_param, 
                       tcg_const_i32(type), tcg_const_i64(param));
}
2348

    
2349
/* Convenience wrapper: SVM intercept check with a zero exit-info parameter. */
static inline void
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    gen_svm_check_intercept_param(s, pc_start, type, 0);
}
2354

    
2355
/* Add 'addend' to ESP/RSP using the stack width implied by the current
   mode: 64-bit in long mode, 32-bit when ss32, else 16-bit. */
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_add_reg_im(2, R_ESP, addend);
    } else
#endif
    if (s->ss32) {
        gen_op_add_reg_im(1, R_ESP, addend);
    } else {
        gen_op_add_reg_im(0, R_ESP, addend);
    }
}
2368

    
2369
/* generate a push. It depends on ss32, addseg and dflag */
/* Pushes T0 onto the stack: decrement the stack pointer by the operand
   size, apply the SS base when required, store, then write the new
   stack pointer back. */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            /* 64-bit operand size */
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            /* 16-bit operand size (0x66 prefix) */
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                /* keep the unsegmented ESP value in T1 for the final update */
                tcg_gen_mov_tl(cpu_T[1], cpu_A0);
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();      /* 16-bit stack wrap */
            tcg_gen_mov_tl(cpu_T[1], cpu_A0);
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);        /* A0 still holds the raw ESP */
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
2408

    
2409
/* generate a push. It depends on ss32, addseg and dflag */
2410
/* slower version for T1, only used for call Ev */
2411
static void gen_push_T1(DisasContext *s)
2412
{
2413
#ifdef TARGET_X86_64
2414
    if (CODE64(s)) {
2415
        gen_op_movq_A0_reg(R_ESP);
2416
        if (s->dflag) {
2417
            gen_op_addq_A0_im(-8);
2418
            gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2419
        } else {
2420
            gen_op_addq_A0_im(-2);
2421
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
2422
        }
2423
        gen_op_mov_reg_A0(2, R_ESP);
2424
    } else
2425
#endif
2426
    {
2427
        gen_op_movl_A0_reg(R_ESP);
2428
        if (!s->dflag)
2429
            gen_op_addl_A0_im(-2);
2430
        else
2431
            gen_op_addl_A0_im(-4);
2432
        if (s->ss32) {
2433
            if (s->addseg) {
2434
                gen_op_addl_A0_seg(R_SS);
2435
            }
2436
        } else {
2437
            gen_op_andl_A0_ffff();
2438
            gen_op_addl_A0_seg(R_SS);
2439
        }
2440
        gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2441

    
2442
        if (s->ss32 && !s->addseg)
2443
            gen_op_mov_reg_A0(1, R_ESP);
2444
        else
2445
            gen_stack_update(s, (-2) << s->dflag);
2446
    }
2447
}
2448

    
2449
/* two step pop is necessary for precise exceptions */
/* Step 1 of a pop: load the top of stack into T0.  The stack pointer is
   only adjusted later by gen_pop_update, so a fault here leaves ESP
   unchanged. */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            gen_op_andl_A0_ffff();      /* 16-bit stack wrap */
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
    }
}
2470

    
2471
/* Step 2 of a pop: bump the stack pointer by the operand size (8 bytes
   in 64-bit mode with default operand size, else 2 << dflag). */
static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}
2482

    
2483
/* Compute the current top-of-stack address into A0 (with SS base when
   needed); the unsegmented value is also left in T1. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();      /* 16-bit stack wrap */
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
2492

    
2493
/* NOTE: wrap around in 16 bit not fully handled */
/* PUSHA/PUSHAD: store the 8 GPRs (EDI..EAX, i.e. 7-i order) starting at
   ESP - 8*opsize, then set ESP to the new top (kept in T1). */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-16 <<  s->dflag);    /* 8 regs * (2 << dflag) bytes */
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);       /* remember new unsegmented ESP */
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
2511

    
2512
/* NOTE: wrap around in 16 bit not fully handled */
/* POPA/POPAD: reload the 8 GPRs from the stack (skipping the saved ESP
   slot) and finally set ESP to old ESP + 8*opsize. */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 <<  s->dflag);   /* final ESP value */
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
2533

    
2534
/* ENTER: push EBP, optionally copy 'level' frame pointers via a helper,
   set EBP to the new frame base and reserve 'esp_addend' bytes. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;      /* architectural: nesting level is mod 32 */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        /* NOTE(review): loads RSP with a 32-bit move here — looks like
           it truncates the stack pointer in 64-bit mode; confirm
           against gen_push_T0 which uses gen_op_movq_A0_reg. */
        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter64_level,
                               tcg_const_i32(level),
                               tcg_const_i32((ot == OT_QUAD)),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);   /* unsegmented frame base */
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter_level,
                               tcg_const_i32(level),
                               tcg_const_i32(s->dflag),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
2589

    
2590
/* Raise exception 'trapno' at 'cur_eip': sync flags and eip, call the
   helper, and end the translation block. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
    s->is_jmp = 3;
}
2598

    
2599
/* an interrupt is different from an exception because of the
   privilege checks */
/* Raise software interrupt 'intno'; the helper also receives the
   instruction length so it can compute the return eip. */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_2(helper_raise_interrupt, 
                       tcg_const_i32(intno), 
                       tcg_const_i32(next_eip - cur_eip));
    s->is_jmp = 3;
}
2612

    
2613
/* Drop into the debugger at 'cur_eip': sync state, call helper_debug,
   end the translation block. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_0(helper_debug);
    s->is_jmp = 3;
}
2621

    
2622
/* generate a generic end of block. Trace exception is also generated
   if needed */
/* Flags are materialized, the interrupt-inhibit shadow is cleared if
   set, then control returns to the main loop (or the debug/single-step
   helper when enabled). */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        tcg_gen_helper_0_0(helper_reset_inhibit_irq);
    }
    if (s->singlestep_enabled) {
        tcg_gen_helper_0_0(helper_debug);
    } else if (s->tf) {
        tcg_gen_helper_0_0(helper_single_step);     /* TF: trap after insn */
    } else {
        tcg_gen_exit_tb(0);     /* 0: no TB chaining */
    }
    s->is_jmp = 3;
}
2640

    
2641
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
/* Unconditional jump: chained via gen_goto_tb when allowed, otherwise a
   plain eip store plus end-of-block. */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2657

    
2658
/* Unconditional jump using TB chaining slot 0. */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2662

    
2663
/* Load a 64-bit value from guest memory at A0 into the CPU state field
   at 'offset'.  'idx' encodes the memory index as used elsewhere
   ((idx >> 2) - 1 recovers it). */
static inline void gen_ldq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
}
2669

    
2670
/* Store the 64-bit CPU state field at 'offset' to guest memory at A0. */
static inline void gen_stq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
}
2676

    
2677
/* Load a 128-bit (oct-word) value from guest memory at A0 into the XMM
   register at env offset 'offset', as two 64-bit halves. */
static inline void gen_ldo_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);       /* address of high half */
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
}
2686

    
2687
/* Store the 128-bit XMM register at env offset 'offset' to guest memory
   at A0, as two 64-bit halves. */
static inline void gen_sto_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);       /* address of high half */
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
}
2696

    
2697
/* Copy a 128-bit value between two env offsets (two 64-bit moves). */
static inline void gen_op_movo(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
}
2704

    
2705
/* Copy a 64-bit value between two env offsets. */
static inline void gen_op_movq(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
2710

    
2711
/* Copy a 32-bit value between two env offsets. */
static inline void gen_op_movl(int d_offset, int s_offset)
{
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
}
2716

    
2717
/* Zero the 64-bit env field at 'd_offset'. */
static inline void gen_op_movq_env_0(int d_offset)
{
    tcg_gen_movi_i64(cpu_tmp1_i64, 0);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
2722

    
2723
/* Sentinels for SSE dispatch: SSE_SPECIAL is decoded inline in
   gen_sse(), SSE_DUMMY marks ops that need no operation body. */
#define SSE_SPECIAL ((void *)1)
#define SSE_DUMMY ((void *)2)

/* Helper-pair/quad constructors: entries are indexed by the prefix
   byte (none, 0x66, 0xf3, 0xf2). */
#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
                     helper_ ## x ## ss, helper_ ## x ## sd, }

/* Main 0F-escape SSE/MMX dispatch table, indexed by opcode byte, then
   by prefix: [0]=none, [1]=0x66, [2]=0xf3, [3]=0xf2. */
static void *sse_op_table1[256][4] = {
    /* 3DNow! extensions */
    [0x0e] = { SSE_DUMMY }, /* femms */
    [0x0f] = { SSE_DUMMY }, /* pf... */
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
    [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { helper_ucomiss, helper_ucomisd },
    [0x2f] = { helper_comiss, helper_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
    [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
    [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
    [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
    [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
               helper_cvtss2sd, helper_cvtsd2ss },
    [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    /* cmpps/cmppd & friends: the actual predicate is chosen from
       sse_op_table4 by the immediate byte */
    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { helper_shufps, helper_shufpd },

    [0x38] = { SSE_SPECIAL, SSE_SPECIAL },  /* SSSE3 */
    [0x3a] = { SSE_SPECIAL, SSE_SPECIAL },  /* SSSE3 */

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, helper_punpcklqdq_xmm },
    [0x6d] = { NULL, helper_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movdqu */
    [0x70] = { helper_pshufw_mmx,
               helper_pshufd_xmm,
               helper_pshufhw_xmm,
               helper_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
    [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2858

    
2859
/* Shift-by-immediate group (0F 71/72/73): indexed by
   8 * size-group (w/d/q) + the ModRM reg field; second column is the
   XMM variant. */
static void *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, helper_psrldq_xmm },     /* psrldq: XMM only */
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, helper_pslldq_xmm },     /* pslldq: XMM only */
};
2871

    
2872
/* Scalar int<->float conversions, in groups of 4
   (ss, sd, then the 64-bit source/dest variants, 64-bit only):
   rows are cvtsi2*, cvtt*2si, cvt*2si. */
static void *sse_op_table3[4 * 3] = {
    helper_cvtsi2ss,
    helper_cvtsi2sd,
    X86_64_ONLY(helper_cvtsq2ss),
    X86_64_ONLY(helper_cvtsq2sd),

    helper_cvttss2si,
    helper_cvttsd2si,
    X86_64_ONLY(helper_cvttss2sq),
    X86_64_ONLY(helper_cvttsd2sq),

    helper_cvtss2si,
    helper_cvtsd2si,
    X86_64_ONLY(helper_cvtss2sq),
    X86_64_ONLY(helper_cvtsd2sq),
};
2888

    
2889
/* cmpps/cmppd/cmpss/cmpsd predicate table, indexed by the immediate
   byte (0..7), then by prefix as in sse_op_table1. */
static void *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2899

    
2900
/* 3DNow! helpers, indexed by the instruction's trailing suffix byte
   (3DNow! insns are 0F 0F /r imm8; see the 0x0f case in gen_sse).
   NULL entries decode as #UD. */
static void *sse_op_table5[256] = {
    [0x0c] = helper_pi2fw,
    [0x0d] = helper_pi2fd,
    [0x1c] = helper_pf2iw,
    [0x1d] = helper_pf2id,
    [0x8a] = helper_pfnacc,
    [0x8e] = helper_pfpnacc,
    [0x90] = helper_pfcmpge,
    [0x94] = helper_pfmin,
    [0x96] = helper_pfrcp,
    [0x97] = helper_pfrsqrt,
    [0x9a] = helper_pfsub,
    [0x9e] = helper_pfadd,
    [0xa0] = helper_pfcmpgt,
    [0xa4] = helper_pfmax,
    [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = helper_movq, /* pfrsqit1 */
    [0xaa] = helper_pfsubr,
    [0xae] = helper_pfacc,
    [0xb0] = helper_pfcmpeq,
    [0xb4] = helper_pfmul,
    [0xb6] = helper_movq, /* pfrcpit2 */
    [0xb7] = helper_pmulhrw_mmx,
    [0xbb] = helper_pswapd,
    [0xbf] = helper_pavgb_mmx /* pavgusb */
};
2926

    
2927
/* SSSE3 0F 38 opcode map, indexed by the third opcode byte;
   column [0] is the MMX form, [1] the xmm (66-prefixed) form.
   NULL entries decode as #UD (see the 0x038/0x138 case in gen_sse). */
static void *sse_op_table6[256][2] = {
    [0x00] = MMX_OP2(pshufb),
    [0x01] = MMX_OP2(phaddw),
    [0x02] = MMX_OP2(phaddd),
    [0x03] = MMX_OP2(phaddsw),
    [0x04] = MMX_OP2(pmaddubsw),
    [0x05] = MMX_OP2(phsubw),
    [0x06] = MMX_OP2(phsubd),
    [0x07] = MMX_OP2(phsubsw),
    [0x08] = MMX_OP2(psignb),
    [0x09] = MMX_OP2(psignw),
    [0x0a] = MMX_OP2(psignd),
    [0x0b] = MMX_OP2(pmulhrsw),
    [0x1c] = MMX_OP2(pabsb),
    [0x1d] = MMX_OP2(pabsw),
    [0x1e] = MMX_OP2(pabsd),
};
2944

    
2945
/* SSSE3 0F 3A opcode map (insns taking an extra imm8), indexed by
   the third opcode byte; column [0] MMX form, [1] xmm form.  Only
   palignr exists at this point; NULL entries decode as #UD. */
static void *sse_op_table7[256][2] = {
    [0x0f] = MMX_OP2(palignr),
};
2948

    
2949
/* Decode and translate one MMX/SSE/SSE2/SSE3/SSSE3/3DNow! instruction.
   'b' is the second opcode byte (after the 0x0f escape), 'rex_r' the
   REX.R contribution to the modrm reg field.  The mandatory prefixes
   select the column b1 of sse_op_table1: 0 = none (MMX / SSE ps),
   1 = 0x66 (pd / xmm integer), 2 = 0xf3 (ss), 3 = 0xf2 (sd).
   Raises #NM when CR0.TS is set, #UD for unknown encodings or when
   the required CPUID feature bit is absent.
   Fix: lddqu (F2 0F F0) is a 128-bit *load* into the xmm register; it
   was previously grouped with the non-temporal stores and wrongly
   stored the register to memory instead. */
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    void *sse_op2;

    b &= 0xff;
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    illegal_op:
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    /* SSE insns need CR4.OSFXSR, except the prefix-less 0F38/0F3A forms */
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
            goto illegal_op;
    if (b == 0x0e) {
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
            goto illegal_op;
        /* femms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    if (b == 0x77) {
        /* emms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        tcg_gen_helper_0_0(helper_enter_mmx);
    }

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    if (is_xmm)
        reg |= rex_r;
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        /* fold the prefix into the switch value: b = (prefix << 8) | opcode */
        b |= (b1 << 8);
        switch(b) {
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntpd */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x3f0: /* lddqu */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            /* 128-bit load from memory, not a store */
            gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx));
                tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
            }
            break;
        case 0x16e: /* movd xmm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
            }
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                /* MMX regs are not REX-extended */
                rm = (modrm & 7);
                tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                /* memory form zeroes the upper three dwords */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                /* memory form zeroes the upper qword */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            /* duplicate even dwords into the odd slots */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            /* duplicate odd dwords into the even slots */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
        case 0x7e: /* movd ea, mm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x17e: /* movd ea, xmm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x27e: /* movq xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x7f: /* movq ea, mm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                            offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x011: /* movups */
        case 0x111: /* movupd */
        case 0x029: /* movaps */
        case 0x129: /* movapd */
        case 0x17f: /* movdqa ea, xmm */
        case 0x27f: /* movdqu ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                            offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x211: /* movss ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            }
            break;
        case 0x311: /* movsd ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            }
            break;
        case 0x013: /* movlps */
        case 0x113: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x017: /* movhps */
        case 0x117: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x71: /* shift mm, im */
        case 0x72:
        case 0x73:
        case 0x171: /* shift xmm, im */
        case 0x172:
        case 0x173:
            val = ldub_code(s->pc++);
            /* stage the shift count in the per-cpu scratch register */
            if (is_xmm) {
                gen_op_movl_T0_im(val);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
                op1_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                gen_op_movl_T0_im(val);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
                op1_offset = offsetof(CPUX86State,mmx_t0);
            }
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
            if (!sse_op2)
                goto illegal_op;
            if (is_xmm) {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0x050: /* movmskps */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x150: /* movmskpd */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x02a: /* cvtpi2ps */
        case 0x12a: /* cvtpi2pd */
            tcg_gen_helper_0_0(helper_enter_mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b >> 8) {
            case 0x0:
                tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
                break;
            default:
            case 0x1:
                tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22a: /* cvtsi2ss */
        case 0x32a: /* cvtsi2sd */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
            if (ot == OT_LONG) {
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
            } else {
                tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_T[0]);
            }
            break;
        case 0x02c: /* cvttps2pi */
        case 0x12c: /* cvttpd2pi */
        case 0x02d: /* cvtps2pi */
        case 0x12d: /* cvtpd2pi */
            tcg_gen_helper_0_0(helper_enter_mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                gen_ldo_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b) {
            case 0x02c:
                tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x12c:
                tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x02d:
                tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x12d:
                tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22c: /* cvttss2si */
        case 0x32c: /* cvttsd2si */
        case 0x22d: /* cvtss2si */
        case 0x32d: /* cvtsd2si */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if ((b >> 8) & 1) {
                    gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
                } else {
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                    tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                }
                op2_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            /* +4 selects the truncating group, +8 the rounding group */
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                                    (b & 1) * 4];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            if (ot == OT_LONG) {
                tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            } else {
                tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
            }
            gen_op_mov_reg_T0(ot, reg);
            break;
        case 0xc4: /* pinsrw */
        case 0x1c4:
            s->rip_offset = 1;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
            } else {
                val &= 3;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
            }
            break;
        case 0xc5: /* pextrw */
        case 0x1c5:
            if (mod != 3)
                goto illegal_op;
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
            } else {
                val &= 3;
                rm = (modrm & 7);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x1d6: /* movq ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x2d6: /* movq2dq */
            tcg_gen_helper_0_0(helper_enter_mmx);
            rm = (modrm & 7);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,fpregs[rm].mmx));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x3d6: /* movdq2q */
            tcg_gen_helper_0_0(helper_enter_mmx);
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            break;
        case 0xd7: /* pmovmskb */
        case 0x1d7:
            if (mod != 3)
                goto illegal_op;
            if (b1) {
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
                tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
            } else {
                rm = (modrm & 7);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
            }
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x038:
        case 0x138:
            if (!(s->cpuid_ext_features & CPUID_EXT_SSSE3))
                goto illegal_op;

            /* three-byte opcode: the old modrm byte is the third opcode byte */
            b = modrm;
            modrm = ldub_code(s->pc++);
            rm = modrm & 7;
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;

            sse_op2 = sse_op_table6[b][b1];
            if (!sse_op2)
                goto illegal_op;

            if (b1) {
                op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
                } else {
                    op2_offset = offsetof(CPUX86State,xmm_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_ldo_env_A0(s->mem_index, op2_offset);
                }
            } else {
                op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
                } else {
                    op2_offset = offsetof(CPUX86State,mmx_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_ldq_env_A0(s->mem_index, op2_offset);
                }
            }
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0x03a:
        case 0x13a:
            if (!(s->cpuid_ext_features & CPUID_EXT_SSSE3))
                goto illegal_op;

            /* three-byte opcode with trailing imm8 */
            b = modrm;
            modrm = ldub_code(s->pc++);
            rm = modrm & 7;
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;

            sse_op2 = sse_op_table7[b][b1];
            if (!sse_op2)
                goto illegal_op;

            if (b1) {
                op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
                } else {
                    op2_offset = offsetof(CPUX86State,xmm_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_ldo_env_A0(s->mem_index, op2_offset);
                }
            } else {
                op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
                } else {
                    op2_offset = offsetof(CPUX86State,mmx_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_ldq_env_A0(s->mem_index, op2_offset);
                }
            }
            val = ldub_code(s->pc++);

            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
            break;
        default:
            goto illegal_op;
        }
    } else {
        /* generic MMX or SSE operation */
        switch(b) {
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
        case 0xc2: /* compare insns */
            /* these carry a trailing imm8; needed for rip-relative addressing */
            s->rip_offset = 1;
            break;
        default:
            break;
        }
        if (is_xmm) {
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
                                b == 0xc2)) {
                    /* specific case for SSE single instructions */
                    if (b1 == 2) {
                        /* 32 bit access */
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                    } else {
                        /* 64 bit access */
                        gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
                    }
                } else {
                    gen_ldo_env_A0(s->mem_index, op2_offset);
                }
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
        } else {
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
        }
        switch(b) {
        case 0x0f: /* 3DNow! data insns */
            if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
                goto illegal_op;
            val = ldub_code(s->pc++);
            sse_op2 = sse_op_table5[val];
            if (!sse_op2)
                goto illegal_op;
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
            val = ldub_code(s->pc++);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
            break;
        case 0xc2:
            /* compare insns */
            val = ldub_code(s->pc++);
            if (val >= 8)
                goto illegal_op;
            sse_op2 = sse_op_table4[val][b1];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0xf7:
            /* maskmov : we must prepare A0 */
            if (mod != 3)
                goto illegal_op;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(R_EDI);
            } else
#endif
            {
                gen_op_movl_A0_reg(R_EDI);
                if (s->aflag == 0)
                    gen_op_andl_A0_ffff();
            }
            gen_add_A0_ds_seg(s);

            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
            break;
        default:
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        }
        if (b == 0x2e || b == 0x2f) {
            /* (u)comiss/(u)comisd set eflags via the helper */
            s->cc_op = CC_OP_EFLAGS;
        }
    }
}
3697

    
3698
/* convert one instruction. s->is_jmp is set if the translation must
3699
   be stopped. Return the next pc value */
3700
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3701
{
3702
    int b, prefixes, aflag, dflag;
3703
    int shift, ot;
3704
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3705
    target_ulong next_eip, tval;
3706
    int rex_w, rex_r;
3707

    
3708
    if (unlikely(loglevel & CPU_LOG_TB_OP))
3709
        tcg_gen_debug_insn_start(pc_start);
3710
    s->pc = pc_start;
3711
    prefixes = 0;
3712
    aflag = s->code32;
3713
    dflag = s->code32;
3714
    s->override = -1;
3715
    rex_w = -1;
3716
    rex_r = 0;
3717
#ifdef TARGET_X86_64
3718
    s->rex_x = 0;
3719
    s->rex_b = 0;
3720
    x86_64_hregs = 0;
3721
#endif
3722
    s->rip_offset = 0; /* for relative ip address */
3723
 next_byte:
3724
    b = ldub_code(s->pc);
3725
    s->pc++;
3726
    /* check prefixes */
3727
#ifdef TARGET_X86_64
3728
    if (CODE64(s)) {
3729
        switch (b) {
3730
        case 0xf3:
3731
            prefixes |= PREFIX_REPZ;
3732
            goto next_byte;
3733
        case 0xf2:
3734
            prefixes |= PREFIX_REPNZ;
3735
            goto next_byte;
3736
        case 0xf0:
3737
            prefixes |= PREFIX_LOCK;
3738
            goto next_byte;
3739
        case 0x2e:
3740
            s->override = R_CS;
3741
            goto next_byte;
3742
        case 0x36:
3743
            s->override = R_SS;
3744
            goto next_byte;
3745
        case 0x3e:
3746
            s->override = R_DS;
3747
            goto next_byte;
3748
        case 0x26:
3749
            s->override = R_ES;
3750
            goto next_byte;
3751
        case 0x64:
3752
            s->override = R_FS;
3753
            goto next_byte;
3754
        case 0x65:
3755
            s->override = R_GS;
3756
            goto next_byte;
3757
        case 0x66:
3758
            prefixes |= PREFIX_DATA;
3759
            goto next_byte;
3760
        case 0x67:
3761
            prefixes |= PREFIX_ADR;
3762
            goto next_byte;
3763
        case 0x40 ... 0x4f:
3764
            /* REX prefix */
3765
            rex_w = (b >> 3) & 1;
3766
            rex_r = (b & 0x4) << 1;
3767
            s->rex_x = (b & 0x2) << 2;
3768
            REX_B(s) = (b & 0x1) << 3;
3769
            x86_64_hregs = 1; /* select uniform byte register addressing */
3770
            goto next_byte;
3771
        }
3772
        if (rex_w == 1) {
3773
            /* 0x66 is ignored if rex.w is set */
3774
            dflag = 2;
3775
        } else {
3776
            if (prefixes & PREFIX_DATA)
3777
                dflag ^= 1;
3778
        }
3779
        if (!(prefixes & PREFIX_ADR))
3780
            aflag = 2;
3781
    } else
3782
#endif
3783
    {
3784
        switch (b) {
3785
        case 0xf3:
3786
            prefixes |= PREFIX_REPZ;
3787
            goto next_byte;
3788
        case 0xf2:
3789
            prefixes |= PREFIX_REPNZ;
3790
            goto next_byte;
3791
        case 0xf0:
3792
            prefixes |= PREFIX_LOCK;
3793
            goto next_byte;
3794
        case 0x2e:
3795
            s->override = R_CS;
3796
            goto next_byte;
3797
        case 0x36:
3798
            s->override = R_SS;
3799
            goto next_byte;
3800
        case 0x3e:
3801
            s->override = R_DS;
3802
            goto next_byte;
3803
        case 0x26:
3804
            s->override = R_ES;
3805
            goto next_byte;
3806
        case 0x64:
3807
            s->override = R_FS;
3808
            goto next_byte;
3809
        case 0x65:
3810
            s->override = R_GS;
3811
            goto next_byte;
3812
        case 0x66:
3813
            prefixes |= PREFIX_DATA;
3814
            goto next_byte;
3815
        case 0x67:
3816
            prefixes |= PREFIX_ADR;
3817
            goto next_byte;
3818
        }
3819
        if (prefixes & PREFIX_DATA)
3820
            dflag ^= 1;
3821
        if (prefixes & PREFIX_ADR)
3822
            aflag ^= 1;
3823
    }
3824

    
3825
    s->prefix = prefixes;
3826
    s->aflag = aflag;
3827
    s->dflag = dflag;
3828

    
3829
    /* lock generation */
3830
    if (prefixes & PREFIX_LOCK)
3831
        tcg_gen_helper_0_0(helper_lock);
3832

    
3833
    /* now check op code */
3834
 reswitch:
3835
    switch(b) {
3836
    case 0x0f:
3837
        /**************************/
3838
        /* extended op code */
3839
        b = ldub_code(s->pc++) | 0x100;
3840
        goto reswitch;
3841

    
3842
        /**************************/
3843
        /* arith & logic */
3844
    case 0x00 ... 0x05:
3845
    case 0x08 ... 0x0d:
3846
    case 0x10 ... 0x15:
3847
    case 0x18 ... 0x1d:
3848
    case 0x20 ... 0x25:
3849
    case 0x28 ... 0x2d:
3850
    case 0x30 ... 0x35:
3851
    case 0x38 ... 0x3d:
3852
        {
3853
            int op, f, val;
3854
            op = (b >> 3) & 7;
3855
            f = (b >> 1) & 3;
3856

    
3857
            if ((b & 1) == 0)
3858
                ot = OT_BYTE;
3859
            else
3860
                ot = dflag + OT_WORD;
3861

    
3862
            switch(f) {
3863
            case 0: /* OP Ev, Gv */
3864
                modrm = ldub_code(s->pc++);
3865
                reg = ((modrm >> 3) & 7) | rex_r;
3866
                mod = (modrm >> 6) & 3;
3867
                rm = (modrm & 7) | REX_B(s);
3868
                if (mod != 3) {
3869
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3870
                    opreg = OR_TMP0;
3871
                } else if (op == OP_XORL && rm == reg) {
3872
                xor_zero:
3873
                    /* xor reg, reg optimisation */
3874
                    gen_op_movl_T0_0();
3875
                    s->cc_op = CC_OP_LOGICB + ot;
3876
                    gen_op_mov_reg_T0(ot, reg);
3877
                    gen_op_update1_cc();
3878
                    break;
3879
                } else {
3880
                    opreg = rm;
3881
                }
3882
                gen_op_mov_TN_reg(ot, 1, reg);
3883
                gen_op(s, op, ot, opreg);
3884
                break;
3885
            case 1: /* OP Gv, Ev */
3886
                modrm = ldub_code(s->pc++);
3887
                mod = (modrm >> 6) & 3;
3888
                reg = ((modrm >> 3) & 7) | rex_r;
3889
                rm = (modrm & 7) | REX_B(s);
3890
                if (mod != 3) {
3891
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3892
                    gen_op_ld_T1_A0(ot + s->mem_index);
3893
                } else if (op == OP_XORL && rm == reg) {
3894
                    goto xor_zero;
3895
                } else {
3896
                    gen_op_mov_TN_reg(ot, 1, rm);
3897
                }
3898
                gen_op(s, op, ot, reg);
3899
                break;
3900
            case 2: /* OP A, Iv */
3901
                val = insn_get(s, ot);
3902
                gen_op_movl_T1_im(val);
3903
                gen_op(s, op, ot, OR_EAX);
3904
                break;
3905
            }
3906
        }
3907
        break;
3908

    
3909
    case 0x82:
3910
        if (CODE64(s))
3911
            goto illegal_op;
3912
    case 0x80: /* GRP1 */
3913
    case 0x81:
3914
    case 0x83:
3915
        {
3916
            int val;
3917

    
3918
            if ((b & 1) == 0)
3919
                ot = OT_BYTE;
3920
            else
3921
                ot = dflag + OT_WORD;
3922

    
3923
            modrm = ldub_code(s->pc++);
3924
            mod = (modrm >> 6) & 3;
3925
            rm = (modrm & 7) | REX_B(s);
3926
            op = (modrm >> 3) & 7;
3927

    
3928
            if (mod != 3) {
3929
                if (b == 0x83)
3930
                    s->rip_offset = 1;
3931
                else
3932
                    s->rip_offset = insn_const_size(ot);
3933
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3934
                opreg = OR_TMP0;
3935
            } else {
3936
                opreg = rm;
3937
            }
3938

    
3939
            switch(b) {
3940
            default:
3941
            case 0x80:
3942
            case 0x81:
3943
            case 0x82:
3944
                val = insn_get(s, ot);
3945
                break;
3946
            case 0x83:
3947
                val = (int8_t)insn_get(s, OT_BYTE);
3948
                break;
3949
            }
3950
            gen_op_movl_T1_im(val);
3951
            gen_op(s, op, ot, opreg);
3952
        }
3953
        break;
3954

    
3955
        /**************************/
3956
        /* inc, dec, and other misc arith */
3957
    case 0x40 ... 0x47: /* inc Gv */
3958
        ot = dflag ? OT_LONG : OT_WORD;
3959
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
3960
        break;
3961
    case 0x48 ... 0x4f: /* dec Gv */
3962
        ot = dflag ? OT_LONG : OT_WORD;
3963
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
3964
        break;
3965
    case 0xf6: /* GRP3 */
3966
    case 0xf7:
3967
        if ((b & 1) == 0)
3968
            ot = OT_BYTE;
3969
        else
3970
            ot = dflag + OT_WORD;
3971

    
3972
        modrm = ldub_code(s->pc++);
3973
        mod = (modrm >> 6) & 3;
3974
        rm = (modrm & 7) | REX_B(s);
3975
        op = (modrm >> 3) & 7;
3976
        if (mod != 3) {
3977
            if (op == 0)
3978
                s->rip_offset = insn_const_size(ot);
3979
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3980
            gen_op_ld_T0_A0(ot + s->mem_index);
3981
        } else {
3982
            gen_op_mov_TN_reg(ot, 0, rm);
3983
        }
3984

    
3985
        switch(op) {
3986
        case 0: /* test */
3987
            val = insn_get(s, ot);
3988
            gen_op_movl_T1_im(val);
3989
            gen_op_testl_T0_T1_cc();
3990
            s->cc_op = CC_OP_LOGICB + ot;
3991
            break;
3992
        case 2: /* not */
3993
            tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
3994
            if (mod != 3) {
3995
                gen_op_st_T0_A0(ot + s->mem_index);
3996
            } else {
3997
                gen_op_mov_reg_T0(ot, rm);
3998
            }
3999
            break;
4000
        case 3: /* neg */
4001
            tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
4002
            if (mod != 3) {
4003
                gen_op_st_T0_A0(ot + s->mem_index);
4004
            } else {
4005
                gen_op_mov_reg_T0(ot, rm);
4006
            }
4007
            gen_op_update_neg_cc();
4008
            s->cc_op = CC_OP_SUBB + ot;
4009
            break;
4010
        case 4: /* mul */
4011
            switch(ot) {
4012
            case OT_BYTE:
4013
                gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4014
                tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4015
                tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
4016
                /* XXX: use 32 bit mul which could be faster */
4017
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4018
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
4019
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4020
                tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
4021
                s->cc_op = CC_OP_MULB;
4022
                break;
4023
            case OT_WORD:
4024
                gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4025
                tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4026
                tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
4027
                /* XXX: use 32 bit mul which could be faster */
4028
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4029
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
4030
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4031
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4032
                gen_op_mov_reg_T0(OT_WORD, R_EDX);
4033
                tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4034
                s->cc_op = CC_OP_MULW;
4035
                break;
4036
            default:
4037
            case OT_LONG:
4038
#ifdef TARGET_X86_64
4039
                gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4040
                tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
4041
                tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
4042
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4043
                gen_op_mov_reg_T0(OT_LONG, R_EAX);
4044
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4045
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4046
                gen_op_mov_reg_T0(OT_LONG, R_EDX);
4047
                tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4048
#else
4049
                {
4050
                    TCGv t0, t1;
4051
                    t0 = tcg_temp_new(TCG_TYPE_I64);
4052
                    t1 = tcg_temp_new(TCG_TYPE_I64);
4053
                    gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4054
                    tcg_gen_extu_i32_i64(t0, cpu_T[0]);
4055
                    tcg_gen_extu_i32_i64(t1, cpu_T[1]);
4056
                    tcg_gen_mul_i64(t0, t0, t1);
4057
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4058
                    gen_op_mov_reg_T0(OT_LONG, R_EAX);
4059
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4060
                    tcg_gen_shri_i64(t0, t0, 32);
4061
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4062
                    gen_op_mov_reg_T0(OT_LONG, R_EDX);
4063
                    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4064
                }
4065
#endif
4066
                s->cc_op = CC_OP_MULL;
4067
                break;
4068
#ifdef TARGET_X86_64
4069
            case OT_QUAD:
4070
                tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
4071
                s->cc_op = CC_OP_MULQ;
4072
                break;
4073
#endif
4074
            }
4075
            break;
4076
        case 5: /* imul */
4077
            switch(ot) {
4078
            case OT_BYTE:
4079
                gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4080
                tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4081
                tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
4082
                /* XXX: use 32 bit mul which could be faster */
4083
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4084
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
4085
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4086
                tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
4087
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4088
                s->cc_op = CC_OP_MULB;
4089
                break;
4090
            case OT_WORD:
4091
                gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4092
                tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4093
                tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4094
                /* XXX: use 32 bit mul which could be faster */
4095
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4096
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
4097
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4098
                tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4099
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4100
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4101
                gen_op_mov_reg_T0(OT_WORD, R_EDX);
4102
                s->cc_op = CC_OP_MULW;
4103
                break;
4104
            default:
4105
            case OT_LONG:
4106
#ifdef TARGET_X86_64
4107
                gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4108
                tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4109
                tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4110
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4111
                gen_op_mov_reg_T0(OT_LONG, R_EAX);
4112
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4113
                tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4114
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4115
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4116
                gen_op_mov_reg_T0(OT_LONG, R_EDX);
4117
#else
4118
                {
4119
                    TCGv t0, t1;
4120
                    t0 = tcg_temp_new(TCG_TYPE_I64);
4121
                    t1 = tcg_temp_new(TCG_TYPE_I64);
4122
                    gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4123
                    tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4124
                    tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4125
                    tcg_gen_mul_i64(t0, t0, t1);
4126
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4127
                    gen_op_mov_reg_T0(OT_LONG, R_EAX);
4128
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4129
                    tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4130
                    tcg_gen_shri_i64(t0, t0, 32);
4131
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4132
                    gen_op_mov_reg_T0(OT_LONG, R_EDX);
4133
                    tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4134
                }
4135
#endif
4136
                s->cc_op = CC_OP_MULL;
4137
                break;
4138
#ifdef TARGET_X86_64
4139
            case OT_QUAD:
4140
                tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
4141
                s->cc_op = CC_OP_MULQ;
4142
                break;
4143
#endif
4144
            }
4145
            break;
4146
        case 6: /* div */
4147
            switch(ot) {
4148
            case OT_BYTE:
4149
                gen_jmp_im(pc_start - s->cs_base);
4150
                tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
4151
                break;
4152
            case OT_WORD:
4153
                gen_jmp_im(pc_start - s->cs_base);
4154
                tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
4155
                break;
4156
            default:
4157
            case OT_LONG:
4158
                gen_jmp_im(pc_start - s->cs_base);
4159
                tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
4160
                break;
4161
#ifdef TARGET_X86_64
4162
            case OT_QUAD:
4163
                gen_jmp_im(pc_start - s->cs_base);
4164
                tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
4165
                break;
4166
#endif
4167
            }
4168
            break;
4169
        case 7: /* idiv */
4170
            switch(ot) {
4171
            case OT_BYTE:
4172
                gen_jmp_im(pc_start - s->cs_base);
4173
                tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
4174
                break;
4175
            case OT_WORD:
4176
                gen_jmp_im(pc_start - s->cs_base);
4177
                tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
4178
                break;
4179
            default:
4180
            case OT_LONG:
4181
                gen_jmp_im(pc_start - s->cs_base);
4182
                tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
4183
                break;
4184
#ifdef TARGET_X86_64
4185
            case OT_QUAD:
4186
                gen_jmp_im(pc_start - s->cs_base);
4187
                tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
4188
                break;
4189
#endif
4190
            }
4191
            break;
4192
        default:
4193
            goto illegal_op;
4194
        }
4195
        break;
4196

    
4197
    case 0xfe: /* GRP4 */
4198
    case 0xff: /* GRP5 */
4199
        if ((b & 1) == 0)
4200
            ot = OT_BYTE;
4201
        else
4202
            ot = dflag + OT_WORD;
4203

    
4204
        modrm = ldub_code(s->pc++);
4205
        mod = (modrm >> 6) & 3;
4206
        rm = (modrm & 7) | REX_B(s);
4207
        op = (modrm >> 3) & 7;
4208
        if (op >= 2 && b == 0xfe) {
4209
            goto illegal_op;
4210
        }
4211
        if (CODE64(s)) {
4212
            if (op == 2 || op == 4) {
4213
                /* operand size for jumps is 64 bit */
4214
                ot = OT_QUAD;
4215
            } else if (op == 3 || op == 5) {
4216
                /* for call calls, the operand is 16 or 32 bit, even
4217
                   in long mode */
4218
                ot = dflag ? OT_LONG : OT_WORD;
4219
            } else if (op == 6) {
4220
                /* default push size is 64 bit */
4221
                ot = dflag ? OT_QUAD : OT_WORD;
4222
            }
4223
        }
4224
        if (mod != 3) {
4225
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4226
            if (op >= 2 && op != 3 && op != 5)
4227
                gen_op_ld_T0_A0(ot + s->mem_index);
4228
        } else {
4229
            gen_op_mov_TN_reg(ot, 0, rm);
4230
        }
4231

    
4232
        switch(op) {
4233
        case 0: /* inc Ev */
4234
            if (mod != 3)
4235
                opreg = OR_TMP0;
4236
            else
4237
                opreg = rm;
4238
            gen_inc(s, ot, opreg, 1);
4239
            break;
4240
        case 1: /* dec Ev */
4241
            if (mod != 3)
4242
                opreg = OR_TMP0;
4243
            else
4244
                opreg = rm;
4245
            gen_inc(s, ot, opreg, -1);
4246
            break;
4247
        case 2: /* call Ev */
4248
            /* XXX: optimize if memory (no 'and' is necessary) */
4249
            if (s->dflag == 0)
4250
                gen_op_andl_T0_ffff();
4251
            next_eip = s->pc - s->cs_base;
4252
            gen_movtl_T1_im(next_eip);
4253
            gen_push_T1(s);
4254
            gen_op_jmp_T0();
4255
            gen_eob(s);
4256
            break;
4257
        case 3: /* lcall Ev */
4258
            gen_op_ld_T1_A0(ot + s->mem_index);
4259
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4260
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4261
        do_lcall:
4262
            if (s->pe && !s->vm86) {
4263
                if (s->cc_op != CC_OP_DYNAMIC)
4264
                    gen_op_set_cc_op(s->cc_op);
4265
                gen_jmp_im(pc_start - s->cs_base);
4266
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4267
                tcg_gen_helper_0_4(helper_lcall_protected,
4268
                                   cpu_tmp2_i32, cpu_T[1],
4269
                                   tcg_const_i32(dflag), 
4270
                                   tcg_const_i32(s->pc - pc_start));
4271
            } else {
4272
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4273
                tcg_gen_helper_0_4(helper_lcall_real,
4274
                                   cpu_tmp2_i32, cpu_T[1],
4275
                                   tcg_const_i32(dflag), 
4276
                                   tcg_const_i32(s->pc - s->cs_base));
4277
            }
4278
            gen_eob(s);
4279
            break;
4280
        case 4: /* jmp Ev */
4281
            if (s->dflag == 0)
4282
                gen_op_andl_T0_ffff();
4283
            gen_op_jmp_T0();
4284
            gen_eob(s);
4285
            break;
4286
        case 5: /* ljmp Ev */
4287
            gen_op_ld_T1_A0(ot + s->mem_index);
4288
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4289
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4290
        do_ljmp:
4291
            if (s->pe && !s->vm86) {
4292
                if (s->cc_op != CC_OP_DYNAMIC)
4293
                    gen_op_set_cc_op(s->cc_op);
4294
                gen_jmp_im(pc_start - s->cs_base);
4295
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4296
                tcg_gen_helper_0_3(helper_ljmp_protected,
4297
                                   cpu_tmp2_i32,
4298
                                   cpu_T[1],
4299
                                   tcg_const_i32(s->pc - pc_start));
4300
            } else {
4301
                gen_op_movl_seg_T0_vm(R_CS);
4302
                gen_op_movl_T0_T1();
4303
                gen_op_jmp_T0();
4304
            }
4305
            gen_eob(s);
4306
            break;
4307
        case 6: /* push Ev */
4308
            gen_push_T0(s);
4309
            break;
4310
        default:
4311
            goto illegal_op;
4312
        }
4313
        break;
4314

    
4315
    case 0x84: /* test Ev, Gv */
4316
    case 0x85:
4317
        if ((b & 1) == 0)
4318
            ot = OT_BYTE;
4319
        else
4320
            ot = dflag + OT_WORD;
4321

    
4322
        modrm = ldub_code(s->pc++);
4323
        mod = (modrm >> 6) & 3;
4324
        rm = (modrm & 7) | REX_B(s);
4325
        reg = ((modrm >> 3) & 7) | rex_r;
4326

    
4327
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4328
        gen_op_mov_TN_reg(ot, 1, reg);
4329
        gen_op_testl_T0_T1_cc();
4330
        s->cc_op = CC_OP_LOGICB + ot;
4331
        break;
4332

    
4333
    case 0xa8: /* test eAX, Iv */
4334
    case 0xa9:
4335
        if ((b & 1) == 0)
4336
            ot = OT_BYTE;
4337
        else
4338
            ot = dflag + OT_WORD;
4339
        val = insn_get(s, ot);
4340

    
4341
        gen_op_mov_TN_reg(ot, 0, OR_EAX);
4342
        gen_op_movl_T1_im(val);
4343
        gen_op_testl_T0_T1_cc();
4344
        s->cc_op = CC_OP_LOGICB + ot;
4345
        break;
4346

    
4347
    case 0x98: /* CWDE/CBW */
4348
#ifdef TARGET_X86_64
4349
        if (dflag == 2) {
4350
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4351
            tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4352
            gen_op_mov_reg_T0(OT_QUAD, R_EAX);
4353
        } else
4354
#endif
4355
        if (dflag == 1) {
4356
            gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4357
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4358
            gen_op_mov_reg_T0(OT_LONG, R_EAX);
4359
        } else {
4360
            gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
4361
            tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4362
            gen_op_mov_reg_T0(OT_WORD, R_EAX);
4363
        }
4364
        break;
4365
    case 0x99: /* CDQ/CWD */
4366
#ifdef TARGET_X86_64
4367
        if (dflag == 2) {
4368
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4369
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
4370
            gen_op_mov_reg_T0(OT_QUAD, R_EDX);
4371
        } else
4372
#endif
4373
        if (dflag == 1) {
4374
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4375
            tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4376
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
4377
            gen_op_mov_reg_T0(OT_LONG, R_EDX);
4378
        } else {
4379
            gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4380
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4381
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
4382
            gen_op_mov_reg_T0(OT_WORD, R_EDX);
4383
        }
4384
        break;
4385
    case 0x1af: /* imul Gv, Ev */
4386
    case 0x69: /* imul Gv, Ev, I */
4387
    case 0x6b:
4388
        ot = dflag + OT_WORD;
4389
        modrm = ldub_code(s->pc++);
4390
        reg = ((modrm >> 3) & 7) | rex_r;
4391
        if (b == 0x69)
4392
            s->rip_offset = insn_const_size(ot);
4393
        else if (b == 0x6b)
4394
            s->rip_offset = 1;
4395
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4396
        if (b == 0x69) {
4397
            val = insn_get(s, ot);
4398
            gen_op_movl_T1_im(val);
4399
        } else if (b == 0x6b) {
4400
            val = (int8_t)insn_get(s, OT_BYTE);
4401
            gen_op_movl_T1_im(val);
4402
        } else {
4403
            gen_op_mov_TN_reg(ot, 1, reg);
4404
        }
4405

    
4406
#ifdef TARGET_X86_64
4407
        if (ot == OT_QUAD) {
4408
            tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
4409
        } else
4410
#endif
4411
        if (ot == OT_LONG) {
4412
#ifdef TARGET_X86_64
4413
                tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4414
                tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4415
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4416
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4417
                tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4418
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4419
#else
4420
                {
4421
                    TCGv t0, t1;
4422
                    t0 = tcg_temp_new(TCG_TYPE_I64);
4423
                    t1 = tcg_temp_new(TCG_TYPE_I64);
4424
                    tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4425
                    tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4426
                    tcg_gen_mul_i64(t0, t0, t1);
4427
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4428
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4429
                    tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4430
                    tcg_gen_shri_i64(t0, t0, 32);
4431
                    tcg_gen_trunc_i64_i32(cpu_T[1], t0);
4432
                    tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
4433
                }
4434
#endif
4435
        } else {
4436
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4437
            tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4438
            /* XXX: use 32 bit mul which could be faster */
4439
            tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4440
            tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4441
            tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4442
            tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4443
        }
4444
        gen_op_mov_reg_T0(ot, reg);
4445
        s->cc_op = CC_OP_MULB + ot;
4446
        break;
4447
    case 0x1c0:
4448
    case 0x1c1: /* xadd Ev, Gv */
4449
        if ((b & 1) == 0)
4450
            ot = OT_BYTE;
4451
        else
4452
            ot = dflag + OT_WORD;
4453
        modrm = ldub_code(s->pc++);
4454
        reg = ((modrm >> 3) & 7) | rex_r;
4455
        mod = (modrm >> 6) & 3;
4456
        if (mod == 3) {
4457
            rm = (modrm & 7) | REX_B(s);
4458
            gen_op_mov_TN_reg(ot, 0, reg);
4459
            gen_op_mov_TN_reg(ot, 1, rm);
4460
            gen_op_addl_T0_T1();
4461
            gen_op_mov_reg_T1(ot, reg);
4462
            gen_op_mov_reg_T0(ot, rm);
4463
        } else {
4464
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4465
            gen_op_mov_TN_reg(ot, 0, reg);
4466
            gen_op_ld_T1_A0(ot + s->mem_index);
4467
            gen_op_addl_T0_T1();
4468
            gen_op_st_T0_A0(ot + s->mem_index);
4469
            gen_op_mov_reg_T1(ot, reg);
4470
        }
4471
        gen_op_update2_cc();
4472
        s->cc_op = CC_OP_ADDB + ot;
4473
        break;
4474
    case 0x1b0:
4475
    case 0x1b1: /* cmpxchg Ev, Gv */
4476
        {
4477
            int label1, label2;
4478
            TCGv t0, t1, t2, a0;
4479

    
4480
            if ((b & 1) == 0)
4481
                ot = OT_BYTE;
4482
            else
4483
                ot = dflag + OT_WORD;
4484
            modrm = ldub_code(s->pc++);
4485
            reg = ((modrm >> 3) & 7) | rex_r;
4486
            mod = (modrm >> 6) & 3;
4487
            t0 = tcg_temp_local_new(TCG_TYPE_TL);
4488
            t1 = tcg_temp_local_new(TCG_TYPE_TL);
4489
            t2 = tcg_temp_local_new(TCG_TYPE_TL);
4490
            a0 = tcg_temp_local_new(TCG_TYPE_TL);
4491
            gen_op_mov_v_reg(ot, t1, reg);
4492
            if (mod == 3) {
4493
                rm = (modrm & 7) | REX_B(s);
4494
                gen_op_mov_v_reg(ot, t0, rm);
4495
            } else {
4496
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4497
                tcg_gen_mov_tl(a0, cpu_A0);
4498
                gen_op_ld_v(ot + s->mem_index, t0, a0);
4499
                rm = 0; /* avoid warning */
4500
            }
4501
            label1 = gen_new_label();
4502
            tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
4503
            tcg_gen_sub_tl(t2, t2, t0);
4504
            gen_extu(ot, t2);
4505
            tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
4506
            if (mod == 3) {
4507
                label2 = gen_new_label();
4508
                gen_op_mov_reg_v(ot, R_EAX, t0);
4509
                tcg_gen_br(label2);
4510
                gen_set_label(label1);
4511
                gen_op_mov_reg_v(ot, rm, t1);
4512
                gen_set_label(label2);
4513
            } else {
4514
                tcg_gen_mov_tl(t1, t0);
4515
                gen_op_mov_reg_v(ot, R_EAX, t0);
4516
                gen_set_label(label1);
4517
                /* always store */
4518
                gen_op_st_v(ot + s->mem_index, t1, a0);
4519
            }
4520
            tcg_gen_mov_tl(cpu_cc_src, t0);
4521
            tcg_gen_mov_tl(cpu_cc_dst, t2);
4522
            s->cc_op = CC_OP_SUBB + ot;
4523
            tcg_temp_free(t0);
4524
            tcg_temp_free(t1);
4525
            tcg_temp_free(t2);
4526
            tcg_temp_free(a0);
4527
        }
4528
        break;
4529
    case 0x1c7: /* cmpxchg8b */
4530
        modrm = ldub_code(s->pc++);
4531
        mod = (modrm >> 6) & 3;
4532
        if ((mod == 3) || ((modrm & 0x38) != 0x8))
4533
            goto illegal_op;
4534
#ifdef TARGET_X86_64
4535
        if (dflag == 2) {
4536
            if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
4537
                goto illegal_op;
4538
            gen_jmp_im(pc_start - s->cs_base);
4539
            if (s->cc_op != CC_OP_DYNAMIC)
4540
                gen_op_set_cc_op(s->cc_op);
4541
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4542
            tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
4543
        } else
4544
#endif        
4545
        {
4546
            if (!(s->cpuid_features & CPUID_CX8))
4547
                goto illegal_op;
4548
            gen_jmp_im(pc_start - s->cs_base);
4549
            if (s->cc_op != CC_OP_DYNAMIC)
4550
                gen_op_set_cc_op(s->cc_op);
4551
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4552
            tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
4553
        }
4554
        s->cc_op = CC_OP_EFLAGS;
4555
        break;
4556

    
4557
        /**************************/
4558
        /* push/pop */
4559
    case 0x50 ... 0x57: /* push */
4560
        gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
4561
        gen_push_T0(s);
4562
        break;
4563
    case 0x58 ... 0x5f: /* pop */
4564
        if (CODE64(s)) {
4565
            ot = dflag ? OT_QUAD : OT_WORD;
4566
        } else {
4567
            ot = dflag + OT_WORD;
4568
        }
4569
        gen_pop_T0(s);
4570
        /* NOTE: order is important for pop %sp */
4571
        gen_pop_update(s);
4572
        gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
4573
        break;
4574
    case 0x60: /* pusha */
4575
        if (CODE64(s))
4576
            goto illegal_op;
4577
        gen_pusha(s);
4578
        break;
4579
    case 0x61: /* popa */
4580
        if (CODE64(s))
4581
            goto illegal_op;
4582
        gen_popa(s);
4583
        break;
4584
    case 0x68: /* push Iv */
4585
    case 0x6a:
4586
        if (CODE64(s)) {
4587
            ot = dflag ? OT_QUAD : OT_WORD;
4588
        } else {
4589
            ot = dflag + OT_WORD;
4590
        }
4591
        if (b == 0x68)
4592
            val = insn_get(s, ot);
4593
        else
4594
            val = (int8_t)insn_get(s, OT_BYTE);
4595
        gen_op_movl_T0_im(val);
4596
        gen_push_T0(s);
4597
        break;
4598
    case 0x8f: /* pop Ev */
4599
        if (CODE64(s)) {
4600
            ot = dflag ? OT_QUAD : OT_WORD;
4601
        } else {
4602
            ot = dflag + OT_WORD;
4603
        }
4604
        modrm = ldub_code(s->pc++);
4605
        mod = (modrm >> 6) & 3;
4606
        gen_pop_T0(s);
4607
        if (mod == 3) {
4608
            /* NOTE: order is important for pop %sp */
4609
            gen_pop_update(s);
4610
            rm = (modrm & 7) | REX_B(s);
4611
            gen_op_mov_reg_T0(ot, rm);
4612
        } else {
4613
            /* NOTE: order is important too for MMU exceptions */
4614
            s->popl_esp_hack = 1 << ot;
4615
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4616
            s->popl_esp_hack = 0;
4617
            gen_pop_update(s);
4618
        }
4619
        break;
4620
    case 0xc8: /* enter */
4621
        {
4622
            int level;
4623
            val = lduw_code(s->pc);
4624
            s->pc += 2;
4625
            level = ldub_code(s->pc++);
4626
            gen_enter(s, val, level);
4627
        }
4628
        break;
4629
    case 0xc9: /* leave */
4630
        /* XXX: exception not precise (ESP is updated before potential exception) */
4631
        if (CODE64(s)) {
4632
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4633
            gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4634
        } else if (s->ss32) {
4635
            gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4636
            gen_op_mov_reg_T0(OT_LONG, R_ESP);
4637
        } else {
4638
            gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4639
            gen_op_mov_reg_T0(OT_WORD, R_ESP);
4640
        }
4641
        gen_pop_T0(s);
4642
        if (CODE64(s)) {
4643
            ot = dflag ? OT_QUAD : OT_WORD;
4644
        } else {
4645
            ot = dflag + OT_WORD;
4646
        }
4647
        gen_op_mov_reg_T0(ot, R_EBP);
4648
        gen_pop_update(s);
4649
        break;
4650
    case 0x06: /* push es */
4651
    case 0x0e: /* push cs */
4652
    case 0x16: /* push ss */
4653
    case 0x1e: /* push ds */
4654
        if (CODE64(s))
4655
            goto illegal_op;
4656
        gen_op_movl_T0_seg(b >> 3);
4657
        gen_push_T0(s);
4658
        break;
4659
    case 0x1a0: /* push fs */
4660
    case 0x1a8: /* push gs */
4661
        gen_op_movl_T0_seg((b >> 3) & 7);
4662
        gen_push_T0(s);
4663
        break;
4664
    case 0x07: /* pop es */
4665
    case 0x17: /* pop ss */
4666
    case 0x1f: /* pop ds */
4667
        if (CODE64(s))
4668
            goto illegal_op;
4669
        reg = b >> 3;
4670
        gen_pop_T0(s);
4671
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4672
        gen_pop_update(s);
4673
        if (reg == R_SS) {
4674
            /* if reg == SS, inhibit interrupts/trace. */
4675
            /* If several instructions disable interrupts, only the
4676
               _first_ does it */
4677
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4678
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
4679
            s->tf = 0;
4680
        }
4681
        if (s->is_jmp) {
4682
            gen_jmp_im(s->pc - s->cs_base);
4683
            gen_eob(s);
4684
        }
4685
        break;
4686
    case 0x1a1: /* pop fs */
4687
    case 0x1a9: /* pop gs */
4688
        gen_pop_T0(s);
4689
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4690
        gen_pop_update(s);
4691
        if (s->is_jmp) {
4692
            gen_jmp_im(s->pc - s->cs_base);
4693
            gen_eob(s);
4694
        }
4695
        break;
4696

    
4697
        /**************************/
4698
        /* mov */
4699
    case 0x88:
4700
    case 0x89: /* mov Gv, Ev */
4701
        if ((b & 1) == 0)
4702
            ot = OT_BYTE;
4703
        else
4704
            ot = dflag + OT_WORD;
4705
        modrm = ldub_code(s->pc++);
4706
        reg = ((modrm >> 3) & 7) | rex_r;
4707

    
4708
        /* generate a generic store */
4709
        gen_ldst_modrm(s, modrm, ot, reg, 1);
4710
        break;
4711
    case 0xc6:
4712
    case 0xc7: /* mov Ev, Iv */
4713
        if ((b & 1) == 0)
4714
            ot = OT_BYTE;
4715
        else
4716
            ot = dflag + OT_WORD;
4717
        modrm = ldub_code(s->pc++);
4718
        mod = (modrm >> 6) & 3;
4719
        if (mod != 3) {
4720
            s->rip_offset = insn_const_size(ot);
4721
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4722
        }
4723
        val = insn_get(s, ot);
4724
        gen_op_movl_T0_im(val);
4725
        if (mod != 3)
4726
            gen_op_st_T0_A0(ot + s->mem_index);
4727
        else
4728
            gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4729
        break;
4730
    case 0x8a:
4731
    case 0x8b: /* mov Ev, Gv */
4732
        if ((b & 1) == 0)
4733
            ot = OT_BYTE;
4734
        else
4735
            ot = OT_WORD + dflag;
4736
        modrm = ldub_code(s->pc++);
4737
        reg = ((modrm >> 3) & 7) | rex_r;
4738

    
4739
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4740
        gen_op_mov_reg_T0(ot, reg);
4741
        break;
4742
    case 0x8e: /* mov seg, Gv */
4743
        modrm = ldub_code(s->pc++);
4744
        reg = (modrm >> 3) & 7;
4745
        if (reg >= 6 || reg == R_CS)
4746
            goto illegal_op;
4747
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4748
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4749
        if (reg == R_SS) {
4750
            /* if reg == SS, inhibit interrupts/trace */
4751
            /* If several instructions disable interrupts, only the
4752
               _first_ does it */
4753
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4754
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
4755
            s->tf = 0;
4756
        }
4757
        if (s->is_jmp) {
4758
            gen_jmp_im(s->pc - s->cs_base);
4759
            gen_eob(s);
4760
        }
4761
        break;
4762
    case 0x8c: /* mov Gv, seg */
4763
        modrm = ldub_code(s->pc++);
4764
        reg = (modrm >> 3) & 7;
4765
        mod = (modrm >> 6) & 3;
4766
        if (reg >= 6)
4767
            goto illegal_op;
4768
        gen_op_movl_T0_seg(reg);
4769
        if (mod == 3)
4770
            ot = OT_WORD + dflag;
4771
        else
4772
            ot = OT_WORD;
4773
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4774
        break;
4775

    
4776
    case 0x1b6: /* movzbS Gv, Eb */
4777
    case 0x1b7: /* movzwS Gv, Eb */
4778
    case 0x1be: /* movsbS Gv, Eb */
4779
    case 0x1bf: /* movswS Gv, Eb */
4780
        {
4781
            int d_ot;
4782
            /* d_ot is the size of destination */
4783
            d_ot = dflag + OT_WORD;
4784
            /* ot is the size of source */
4785
            ot = (b & 1) + OT_BYTE;
4786
            modrm = ldub_code(s->pc++);
4787
            reg = ((modrm >> 3) & 7) | rex_r;
4788
            mod = (modrm >> 6) & 3;
4789
            rm = (modrm & 7) | REX_B(s);
4790

    
4791
            if (mod == 3) {
4792
                gen_op_mov_TN_reg(ot, 0, rm);
4793
                switch(ot | (b & 8)) {
4794
                case OT_BYTE:
4795
                    tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4796
                    break;
4797
                case OT_BYTE | 8:
4798
                    tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4799
                    break;
4800
                case OT_WORD:
4801
                    tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4802
                    break;
4803
                default:
4804
                case OT_WORD | 8:
4805
                    tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4806
                    break;
4807
                }
4808
                gen_op_mov_reg_T0(d_ot, reg);
4809
            } else {
4810
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4811
                if (b & 8) {
4812
                    gen_op_lds_T0_A0(ot + s->mem_index);
4813
                } else {
4814
                    gen_op_ldu_T0_A0(ot + s->mem_index);
4815
                }
4816
                gen_op_mov_reg_T0(d_ot, reg);
4817
            }
4818
        }
4819
        break;
4820

    
4821
    case 0x8d: /* lea */
4822
        ot = dflag + OT_WORD;
4823
        modrm = ldub_code(s->pc++);
4824
        mod = (modrm >> 6) & 3;
4825
        if (mod == 3)
4826
            goto illegal_op;
4827
        reg = ((modrm >> 3) & 7) | rex_r;
4828
        /* we must ensure that no segment is added */
4829
        s->override = -1;
4830
        val = s->addseg;
4831
        s->addseg = 0;
4832
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4833
        s->addseg = val;
4834
        gen_op_mov_reg_A0(ot - OT_WORD, reg);
4835
        break;
4836

    
4837
    case 0xa0: /* mov EAX, Ov */
4838
    case 0xa1:
4839
    case 0xa2: /* mov Ov, EAX */
4840
    case 0xa3:
4841
        {
4842
            target_ulong offset_addr;
4843

    
4844
            if ((b & 1) == 0)
4845
                ot = OT_BYTE;
4846
            else
4847
                ot = dflag + OT_WORD;
4848
#ifdef TARGET_X86_64
4849
            if (s->aflag == 2) {
4850
                offset_addr = ldq_code(s->pc);
4851
                s->pc += 8;
4852
                gen_op_movq_A0_im(offset_addr);
4853
            } else
4854
#endif
4855
            {
4856
                if (s->aflag) {
4857
                    offset_addr = insn_get(s, OT_LONG);
4858
                } else {
4859
                    offset_addr = insn_get(s, OT_WORD);
4860
                }
4861
                gen_op_movl_A0_im(offset_addr);
4862
            }
4863
            gen_add_A0_ds_seg(s);
4864
            if ((b & 2) == 0) {
4865
                gen_op_ld_T0_A0(ot + s->mem_index);
4866
                gen_op_mov_reg_T0(ot, R_EAX);
4867
            } else {
4868
                gen_op_mov_TN_reg(ot, 0, R_EAX);
4869
                gen_op_st_T0_A0(ot + s->mem_index);
4870
            }
4871
        }
4872
        break;
4873
    case 0xd7: /* xlat */
4874
#ifdef TARGET_X86_64
4875
        if (s->aflag == 2) {
4876
            gen_op_movq_A0_reg(R_EBX);
4877
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4878
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4879
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4880
        } else
4881
#endif
4882
        {
4883
            gen_op_movl_A0_reg(R_EBX);
4884
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4885
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4886
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4887
            if (s->aflag == 0)
4888
                gen_op_andl_A0_ffff();
4889
            else
4890
                tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
4891
        }
4892
        gen_add_A0_ds_seg(s);
4893
        gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4894
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4895
        break;
4896
    case 0xb0 ... 0xb7: /* mov R, Ib */
4897
        val = insn_get(s, OT_BYTE);
4898
        gen_op_movl_T0_im(val);
4899
        gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4900
        break;
4901
    case 0xb8 ... 0xbf: /* mov R, Iv */
4902
#ifdef TARGET_X86_64
4903
        if (dflag == 2) {
4904
            uint64_t tmp;
4905
            /* 64 bit case */
4906
            tmp = ldq_code(s->pc);
4907
            s->pc += 8;
4908
            reg = (b & 7) | REX_B(s);
4909
            gen_movtl_T0_im(tmp);
4910
            gen_op_mov_reg_T0(OT_QUAD, reg);
4911
        } else
4912
#endif
4913
        {
4914
            ot = dflag ? OT_LONG : OT_WORD;
4915
            val = insn_get(s, ot);
4916
            reg = (b & 7) | REX_B(s);
4917
            gen_op_movl_T0_im(val);
4918
            gen_op_mov_reg_T0(ot, reg);
4919
        }
4920
        break;
4921

    
4922
    case 0x91 ... 0x97: /* xchg R, EAX */
4923
        ot = dflag + OT_WORD;
4924
        reg = (b & 7) | REX_B(s);
4925
        rm = R_EAX;
4926
        goto do_xchg_reg;
4927
    case 0x86:
4928
    case 0x87: /* xchg Ev, Gv */
4929
        if ((b & 1) == 0)
4930
            ot = OT_BYTE;
4931
        else
4932
            ot = dflag + OT_WORD;
4933
        modrm = ldub_code(s->pc++);
4934
        reg = ((modrm >> 3) & 7) | rex_r;
4935
        mod = (modrm >> 6) & 3;
4936
        if (mod == 3) {
4937
            rm = (modrm & 7) | REX_B(s);
4938
        do_xchg_reg:
4939
            gen_op_mov_TN_reg(ot, 0, reg);
4940
            gen_op_mov_TN_reg(ot, 1, rm);
4941
            gen_op_mov_reg_T0(ot, rm);
4942
            gen_op_mov_reg_T1(ot, reg);
4943
        } else {
4944
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4945
            gen_op_mov_TN_reg(ot, 0, reg);
4946
            /* for xchg, lock is implicit */
4947
            if (!(prefixes & PREFIX_LOCK))
4948
                tcg_gen_helper_0_0(helper_lock);
4949
            gen_op_ld_T1_A0(ot + s->mem_index);
4950
            gen_op_st_T0_A0(ot + s->mem_index);
4951
            if (!(prefixes & PREFIX_LOCK))
4952
                tcg_gen_helper_0_0(helper_unlock);
4953
            gen_op_mov_reg_T1(ot, reg);
4954
        }
4955
        break;
4956
    case 0xc4: /* les Gv */
4957
        if (CODE64(s))
4958
            goto illegal_op;
4959
        op = R_ES;
4960
        goto do_lxx;
4961
    case 0xc5: /* lds Gv */
4962
        if (CODE64(s))
4963
            goto illegal_op;
4964
        op = R_DS;
4965
        goto do_lxx;
4966
    case 0x1b2: /* lss Gv */
4967
        op = R_SS;
4968
        goto do_lxx;
4969
    case 0x1b4: /* lfs Gv */
4970
        op = R_FS;
4971
        goto do_lxx;
4972
    case 0x1b5: /* lgs Gv */
4973
        op = R_GS;
4974
    do_lxx:
4975
        ot = dflag ? OT_LONG : OT_WORD;
4976
        modrm = ldub_code(s->pc++);
4977
        reg = ((modrm >> 3) & 7) | rex_r;
4978
        mod = (modrm >> 6) & 3;
4979
        if (mod == 3)
4980
            goto illegal_op;
4981
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4982
        gen_op_ld_T1_A0(ot + s->mem_index);
4983
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4984
        /* load the segment first to handle exceptions properly */
4985
        gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4986
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4987
        /* then put the data */
4988
        gen_op_mov_reg_T1(ot, reg);
4989
        if (s->is_jmp) {
4990
            gen_jmp_im(s->pc - s->cs_base);
4991
            gen_eob(s);
4992
        }
4993
        break;
4994

    
4995
        /************************/
4996
        /* shifts */
4997
    case 0xc0:
4998
    case 0xc1:
4999
        /* shift Ev,Ib */
5000
        shift = 2;
5001
    grp2:
5002
        {
5003
            if ((b & 1) == 0)
5004
                ot = OT_BYTE;
5005
            else
5006
                ot = dflag + OT_WORD;
5007

    
5008
            modrm = ldub_code(s->pc++);
5009
            mod = (modrm >> 6) & 3;
5010
            op = (modrm >> 3) & 7;
5011

    
5012
            if (mod != 3) {
5013
                if (shift == 2) {
5014
                    s->rip_offset = 1;
5015
                }
5016
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5017
                opreg = OR_TMP0;
5018
            } else {
5019
                opreg = (modrm & 7) | REX_B(s);
5020
            }
5021

    
5022
            /* simpler op */
5023
            if (shift == 0) {
5024
                gen_shift(s, op, ot, opreg, OR_ECX);
5025
            } else {
5026
                if (shift == 2) {
5027
                    shift = ldub_code(s->pc++);
5028
                }
5029
                gen_shifti(s, op, ot, opreg, shift);
5030
            }
5031
        }
5032
        break;
5033
    case 0xd0:
5034
    case 0xd1:
5035
        /* shift Ev,1 */
5036
        shift = 1;
5037
        goto grp2;
5038
    case 0xd2:
5039
    case 0xd3:
5040
        /* shift Ev,cl */
5041
        shift = 0;
5042
        goto grp2;
5043

    
5044
    case 0x1a4: /* shld imm */
5045
        op = 0;
5046
        shift = 1;
5047
        goto do_shiftd;
5048
    case 0x1a5: /* shld cl */
5049
        op = 0;
5050
        shift = 0;
5051
        goto do_shiftd;
5052
    case 0x1ac: /* shrd imm */
5053
        op = 1;
5054
        shift = 1;
5055
        goto do_shiftd;
5056
    case 0x1ad: /* shrd cl */
5057
        op = 1;
5058
        shift = 0;
5059
    do_shiftd:
5060
        ot = dflag + OT_WORD;
5061
        modrm = ldub_code(s->pc++);
5062
        mod = (modrm >> 6) & 3;
5063
        rm = (modrm & 7) | REX_B(s);
5064
        reg = ((modrm >> 3) & 7) | rex_r;
5065
        if (mod != 3) {
5066
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5067
            opreg = OR_TMP0;
5068
        } else {
5069
            opreg = rm;
5070
        }
5071
        gen_op_mov_TN_reg(ot, 1, reg);
5072

    
5073
        if (shift) {
5074
            val = ldub_code(s->pc++);
5075
            tcg_gen_movi_tl(cpu_T3, val);
5076
        } else {
5077
            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
5078
        }
5079
        gen_shiftd_rm_T1_T3(s, ot, opreg, op);
5080
        break;
5081

    
5082
        /************************/
5083
        /* floats */
5084
    case 0xd8 ... 0xdf:
5085
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
5086
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
5087
            /* XXX: what to do if illegal op ? */
5088
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5089
            break;
5090
        }
5091
        modrm = ldub_code(s->pc++);
5092
        mod = (modrm >> 6) & 3;
5093
        rm = modrm & 7;
5094
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
5095
        if (mod != 3) {
5096
            /* memory op */
5097
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5098
            switch(op) {
5099
            case 0x00 ... 0x07: /* fxxxs */
5100
            case 0x10 ... 0x17: /* fixxxl */
5101
            case 0x20 ... 0x27: /* fxxxl */
5102
            case 0x30 ... 0x37: /* fixxx */
5103
                {
5104
                    int op1;
5105
                    op1 = op & 7;
5106

    
5107
                    switch(op >> 4) {
5108
                    case 0:
5109
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5110
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5111
                        tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
5112
                        break;
5113
                    case 1:
5114
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5115
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5116
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
5117
                        break;
5118
                    case 2:
5119
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
5120
                                          (s->mem_index >> 2) - 1);
5121
                        tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
5122
                        break;
5123
                    case 3:
5124
                    default:
5125
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
5126
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5127
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
5128
                        break;
5129
                    }
5130

    
5131
                    tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
5132
                    if (op1 == 3) {
5133
                        /* fcomp needs pop */
5134
                        tcg_gen_helper_0_0(helper_fpop);
5135
                    }
5136
                }
5137
                break;
5138
            case 0x08: /* flds */
5139
            case 0x0a: /* fsts */
5140
            case 0x0b: /* fstps */
5141
            case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
5142
            case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
5143
            case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
5144
                switch(op & 7) {
5145
                case 0:
5146
                    switch(op >> 4) {
5147
                    case 0:
5148
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5149
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5150
                        tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
5151
                        break;
5152
                    case 1:
5153
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5154
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5155
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
5156
                        break;
5157
                    case 2:
5158
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
5159
                                          (s->mem_index >> 2) - 1);
5160
                        tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
5161
                        break;
5162
                    case 3:
5163
                    default:
5164
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
5165
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5166
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
5167
                        break;
5168
                    }
5169
                    break;
5170
                case 1:
5171
                    /* XXX: the corresponding CPUID bit must be tested ! */
5172
                    switch(op >> 4) {
5173
                    case 1:
5174
                        tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
5175
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5176
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
5177
                        break;
5178
                    case 2:
5179
                        tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
5180
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
5181
                                          (s->mem_index >> 2) - 1);
5182
                        break;
5183
                    case 3:
5184
                    default:
5185
                        tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
5186
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5187
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
5188
                        break;
5189
                    }
5190
                    tcg_gen_helper_0_0(helper_fpop);
5191
                    break;
5192
                default:
5193
                    switch(op >> 4) {
5194
                    case 0:
5195
                        tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
5196
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5197
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
5198
                        break;
5199
                    case 1:
5200
                        tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
5201
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5202
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
5203
                        break;
5204
                    case 2:
5205
                        tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
5206
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
5207
                                          (s->mem_index >> 2) - 1);
5208
                        break;
5209
                    case 3:
5210
                    default:
5211
                        tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
5212
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5213
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
5214
                        break;
5215
                    }
5216
                    if ((op & 7) == 3)
5217
                        tcg_gen_helper_0_0(helper_fpop);
5218
                    break;
5219
                }
5220
                break;
5221
            case 0x0c: /* fldenv mem */
5222
                if (s->cc_op != CC_OP_DYNAMIC)
5223
                    gen_op_set_cc_op(s->cc_op);
5224
                gen_jmp_im(pc_start - s->cs_base);
5225
                tcg_gen_helper_0_2(helper_fldenv, 
5226
                                   cpu_A0, tcg_const_i32(s->dflag));
5227
                break;
5228
            case 0x0d: /* fldcw mem */
5229
                gen_op_ld_T0_A0(OT_WORD + s->mem_index);
5230
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5231
                tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
5232
                break;
5233
            case 0x0e: /* fnstenv mem */
5234
                if (s->cc_op != CC_OP_DYNAMIC)
5235
                    gen_op_set_cc_op(s->cc_op);
5236
                gen_jmp_im(pc_start - s->cs_base);
5237
                tcg_gen_helper_0_2(helper_fstenv,
5238
                                   cpu_A0, tcg_const_i32(s->dflag));
5239
                break;
5240
            case 0x0f: /* fnstcw mem */
5241
                tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
5242
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5243
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
5244
                break;
5245
            case 0x1d: /* fldt mem */
5246
                if (s->cc_op != CC_OP_DYNAMIC)
5247
                    gen_op_set_cc_op(s->cc_op);
5248
                gen_jmp_im(pc_start - s->cs_base);
5249
                tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
5250
                break;
5251
            case 0x1f: /* fstpt mem */
5252
                if (s->cc_op != CC_OP_DYNAMIC)
5253
                    gen_op_set_cc_op(s->cc_op);
5254
                gen_jmp_im(pc_start - s->cs_base);
5255
                tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
5256
                tcg_gen_helper_0_0(helper_fpop);
5257
                break;
5258
            case 0x2c: /* frstor mem */
5259
                if (s->cc_op != CC_OP_DYNAMIC)
5260
                    gen_op_set_cc_op(s->cc_op);
5261
                gen_jmp_im(pc_start - s->cs_base);
5262
                tcg_gen_helper_0_2(helper_frstor,
5263
                                   cpu_A0, tcg_const_i32(s->dflag));
5264
                break;
5265
            case 0x2e: /* fnsave mem */
5266
                if (s->cc_op != CC_OP_DYNAMIC)
5267
                    gen_op_set_cc_op(s->cc_op);
5268
                gen_jmp_im(pc_start - s->cs_base);
5269
                tcg_gen_helper_0_2(helper_fsave,
5270
                                   cpu_A0, tcg_const_i32(s->dflag));
5271
                break;
5272
            case 0x2f: /* fnstsw mem */
5273
                tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5274
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5275
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
5276
                break;
5277
            case 0x3c: /* fbld */
5278
                if (s->cc_op != CC_OP_DYNAMIC)
5279
                    gen_op_set_cc_op(s->cc_op);
5280
                gen_jmp_im(pc_start - s->cs_base);
5281
                tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
5282
                break;
5283
            case 0x3e: /* fbstp */
5284
                if (s->cc_op != CC_OP_DYNAMIC)
5285
                    gen_op_set_cc_op(s->cc_op);
5286
                gen_jmp_im(pc_start - s->cs_base);
5287
                tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
5288
                tcg_gen_helper_0_0(helper_fpop);
5289
                break;
5290
            case 0x3d: /* fildll */
5291
                tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
5292
                                  (s->mem_index >> 2) - 1);
5293
                tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
5294
                break;
5295
            case 0x3f: /* fistpll */
5296
                tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
5297
                tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
5298
                                  (s->mem_index >> 2) - 1);
5299
                tcg_gen_helper_0_0(helper_fpop);
5300
                break;
5301
            default:
5302
                goto illegal_op;
5303
            }
5304
        } else {
5305
            /* register float ops */
5306
            opreg = rm;
5307

    
5308
            switch(op) {
5309
            case 0x08: /* fld sti */
5310
                tcg_gen_helper_0_0(helper_fpush);
5311
                tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
5312
                break;
5313
            case 0x09: /* fxchg sti */
5314
            case 0x29: /* fxchg4 sti, undocumented op */
5315
            case 0x39: /* fxchg7 sti, undocumented op */
5316
                tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
5317
                break;
5318
            case 0x0a: /* grp d9/2 */
5319
                switch(rm) {
5320
                case 0: /* fnop */
5321
                    /* check exceptions (FreeBSD FPU probe) */
5322
                    if (s->cc_op != CC_OP_DYNAMIC)
5323
                        gen_op_set_cc_op(s->cc_op);
5324
                    gen_jmp_im(pc_start - s->cs_base);
5325
                    tcg_gen_helper_0_0(helper_fwait);
5326
                    break;
5327
                default:
5328
                    goto illegal_op;
5329
                }
5330
                break;
5331
            case 0x0c: /* grp d9/4 */
5332
                switch(rm) {
5333
                case 0: /* fchs */
5334
                    tcg_gen_helper_0_0(helper_fchs_ST0);
5335
                    break;
5336
                case 1: /* fabs */
5337
                    tcg_gen_helper_0_0(helper_fabs_ST0);
5338
                    break;
5339
                case 4: /* ftst */
5340
                    tcg_gen_helper_0_0(helper_fldz_FT0);
5341
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5342
                    break;
5343
                case 5: /* fxam */
5344
                    tcg_gen_helper_0_0(helper_fxam_ST0);
5345
                    break;
5346
                default:
5347
                    goto illegal_op;
5348
                }
5349
                break;
5350
            case 0x0d: /* grp d9/5 */
5351
                {
5352
                    switch(rm) {
5353
                    case 0:
5354
                        tcg_gen_helper_0_0(helper_fpush);
5355
                        tcg_gen_helper_0_0(helper_fld1_ST0);
5356
                        break;
5357
                    case 1:
5358
                        tcg_gen_helper_0_0(helper_fpush);
5359
                        tcg_gen_helper_0_0(helper_fldl2t_ST0);
5360
                        break;
5361
                    case 2:
5362
                        tcg_gen_helper_0_0(helper_fpush);
5363
                        tcg_gen_helper_0_0(helper_fldl2e_ST0);
5364
                        break;
5365
                    case 3:
5366
                        tcg_gen_helper_0_0(helper_fpush);
5367
                        tcg_gen_helper_0_0(helper_fldpi_ST0);
5368
                        break;
5369
                    case 4:
5370
                        tcg_gen_helper_0_0(helper_fpush);
5371
                        tcg_gen_helper_0_0(helper_fldlg2_ST0);
5372
                        break;
5373
                    case 5:
5374
                        tcg_gen_helper_0_0(helper_fpush);
5375
                        tcg_gen_helper_0_0(helper_fldln2_ST0);
5376
                        break;
5377
                    case 6:
5378
                        tcg_gen_helper_0_0(helper_fpush);
5379
                        tcg_gen_helper_0_0(helper_fldz_ST0);
5380
                        break;
5381
                    default:
5382
                        goto illegal_op;
5383
                    }
5384
                }
5385
                break;
5386
            case 0x0e: /* grp d9/6 */
5387
                switch(rm) {
5388
                case 0: /* f2xm1 */
5389
                    tcg_gen_helper_0_0(helper_f2xm1);
5390
                    break;
5391
                case 1: /* fyl2x */
5392
                    tcg_gen_helper_0_0(helper_fyl2x);
5393
                    break;
5394
                case 2: /* fptan */
5395
                    tcg_gen_helper_0_0(helper_fptan);
5396
                    break;
5397
                case 3: /* fpatan */
5398
                    tcg_gen_helper_0_0(helper_fpatan);
5399
                    break;
5400
                case 4: /* fxtract */
5401
                    tcg_gen_helper_0_0(helper_fxtract);
5402
                    break;
5403
                case 5: /* fprem1 */
5404
                    tcg_gen_helper_0_0(helper_fprem1);
5405
                    break;
5406
                case 6: /* fdecstp */
5407
                    tcg_gen_helper_0_0(helper_fdecstp);
5408
                    break;
5409
                default:
5410
                case 7: /* fincstp */
5411
                    tcg_gen_helper_0_0(helper_fincstp);
5412
                    break;
5413
                }
5414
                break;
5415
            case 0x0f: /* grp d9/7 */
5416
                switch(rm) {
5417
                case 0: /* fprem */
5418
                    tcg_gen_helper_0_0(helper_fprem);
5419
                    break;
5420
                case 1: /* fyl2xp1 */
5421
                    tcg_gen_helper_0_0(helper_fyl2xp1);
5422
                    break;
5423
                case 2: /* fsqrt */
5424
                    tcg_gen_helper_0_0(helper_fsqrt);
5425
                    break;
5426
                case 3: /* fsincos */
5427
                    tcg_gen_helper_0_0(helper_fsincos);
5428
                    break;
5429
                case 5: /* fscale */
5430
                    tcg_gen_helper_0_0(helper_fscale);
5431
                    break;
5432
                case 4: /* frndint */
5433
                    tcg_gen_helper_0_0(helper_frndint);
5434
                    break;
5435
                case 6: /* fsin */
5436
                    tcg_gen_helper_0_0(helper_fsin);
5437
                    break;
5438
                default:
5439
                case 7: /* fcos */
5440
                    tcg_gen_helper_0_0(helper_fcos);
5441
                    break;
5442
                }
5443
                break;
5444
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
5445
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
5446
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
5447
                {
5448
                    int op1;
5449

    
5450
                    op1 = op & 7;
5451
                    if (op >= 0x20) {
5452
                        tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
5453
                        if (op >= 0x30)
5454
                            tcg_gen_helper_0_0(helper_fpop);
5455
                    } else {
5456
                        tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5457
                        tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
5458
                    }
5459
                }
5460
                break;
5461
            case 0x02: /* fcom */
5462
            case 0x22: /* fcom2, undocumented op */
5463
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5464
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5465
                break;
5466
            case 0x03: /* fcomp */
5467
            case 0x23: /* fcomp3, undocumented op */
5468
            case 0x32: /* fcomp5, undocumented op */
5469
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5470
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5471
                tcg_gen_helper_0_0(helper_fpop);
5472
                break;
5473
            case 0x15: /* da/5 */
5474
                switch(rm) {
5475
                case 1: /* fucompp */
5476
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5477
                    tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5478
                    tcg_gen_helper_0_0(helper_fpop);
5479
                    tcg_gen_helper_0_0(helper_fpop);
5480
                    break;
5481
                default:
5482
                    goto illegal_op;
5483
                }
5484
                break;
5485
            case 0x1c:
5486
                switch(rm) {
5487
                case 0: /* feni (287 only, just do nop here) */
5488
                    break;
5489
                case 1: /* fdisi (287 only, just do nop here) */
5490
                    break;
5491
                case 2: /* fclex */
5492
                    tcg_gen_helper_0_0(helper_fclex);
5493
                    break;
5494
                case 3: /* fninit */
5495
                    tcg_gen_helper_0_0(helper_fninit);
5496
                    break;
5497
                case 4: /* fsetpm (287 only, just do nop here) */
5498
                    break;
5499
                default:
5500
                    goto illegal_op;
5501
                }
5502
                break;
5503
            case 0x1d: /* fucomi */
5504
                if (s->cc_op != CC_OP_DYNAMIC)
5505
                    gen_op_set_cc_op(s->cc_op);
5506
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5507
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5508
                s->cc_op = CC_OP_EFLAGS;
5509
                break;
5510
            case 0x1e: /* fcomi */
5511
                if (s->cc_op != CC_OP_DYNAMIC)
5512
                    gen_op_set_cc_op(s->cc_op);
5513
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5514
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5515
                s->cc_op = CC_OP_EFLAGS;
5516
                break;
5517
            case 0x28: /* ffree sti */
5518
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5519
                break;
5520
            case 0x2a: /* fst sti */
5521
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5522
                break;
5523
            case 0x2b: /* fstp sti */
5524
            case 0x0b: /* fstp1 sti, undocumented op */
5525
            case 0x3a: /* fstp8 sti, undocumented op */
5526
            case 0x3b: /* fstp9 sti, undocumented op */
5527
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5528
                tcg_gen_helper_0_0(helper_fpop);
5529
                break;
5530
            case 0x2c: /* fucom st(i) */
5531
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5532
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5533
                break;
5534
            case 0x2d: /* fucomp st(i) */
5535
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5536
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5537
                tcg_gen_helper_0_0(helper_fpop);
5538
                break;
5539
            case 0x33: /* de/3 */
5540
                switch(rm) {
5541
                case 1: /* fcompp */
5542
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5543
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5544
                    tcg_gen_helper_0_0(helper_fpop);
5545
                    tcg_gen_helper_0_0(helper_fpop);
5546
                    break;
5547
                default:
5548
                    goto illegal_op;
5549
                }
5550
                break;
5551
            case 0x38: /* ffreep sti, undocumented op */
5552
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5553
                tcg_gen_helper_0_0(helper_fpop);
5554
                break;
5555
            case 0x3c: /* df/4 */
5556
                switch(rm) {
5557
                case 0:
5558
                    tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5559
                    tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5560
                    gen_op_mov_reg_T0(OT_WORD, R_EAX);
5561
                    break;
5562
                default:
5563
                    goto illegal_op;
5564
                }
5565
                break;
5566
            case 0x3d: /* fucomip */
5567
                if (s->cc_op != CC_OP_DYNAMIC)
5568
                    gen_op_set_cc_op(s->cc_op);
5569
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5570
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5571
                tcg_gen_helper_0_0(helper_fpop);
5572
                s->cc_op = CC_OP_EFLAGS;
5573
                break;
5574
            case 0x3e: /* fcomip */
5575
                if (s->cc_op != CC_OP_DYNAMIC)
5576
                    gen_op_set_cc_op(s->cc_op);
5577
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5578
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5579
                tcg_gen_helper_0_0(helper_fpop);
5580
                s->cc_op = CC_OP_EFLAGS;
5581
                break;
5582
            case 0x10 ... 0x13: /* fcmovxx */
5583
            case 0x18 ... 0x1b:
5584
                {
5585
                    int op1, l1;
5586
                    static const uint8_t fcmov_cc[8] = {
5587
                        (JCC_B << 1),
5588
                        (JCC_Z << 1),
5589
                        (JCC_BE << 1),
5590
                        (JCC_P << 1),
5591
                    };
5592
                    op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
5593
                    l1 = gen_new_label();
5594
                    gen_jcc1(s, s->cc_op, op1, l1);
5595
                    tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
5596
                    gen_set_label(l1);
5597
                }
5598
                break;
5599
            default:
5600
                goto illegal_op;
5601
            }
5602
        }
5603
        break;
5604
        /************************/
5605
        /* string ops */
5606

    
5607
    case 0xa4: /* movsS */
5608
    case 0xa5:
5609
        if ((b & 1) == 0)
5610
            ot = OT_BYTE;
5611
        else
5612
            ot = dflag + OT_WORD;
5613

    
5614
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5615
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5616
        } else {
5617
            gen_movs(s, ot);
5618
        }
5619
        break;
5620

    
5621
    case 0xaa: /* stosS */
5622
    case 0xab:
5623
        if ((b & 1) == 0)
5624
            ot = OT_BYTE;
5625
        else
5626
            ot = dflag + OT_WORD;
5627

    
5628
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5629
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5630
        } else {
5631
            gen_stos(s, ot);
5632
        }
5633
        break;
5634
    case 0xac: /* lodsS */
5635
    case 0xad:
5636
        if ((b & 1) == 0)
5637
            ot = OT_BYTE;
5638
        else
5639
            ot = dflag + OT_WORD;
5640
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5641
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5642
        } else {
5643
            gen_lods(s, ot);
5644
        }
5645
        break;
5646
    case 0xae: /* scasS */
5647
    case 0xaf:
5648
        if ((b & 1) == 0)
5649
            ot = OT_BYTE;
5650
        else
5651
            ot = dflag + OT_WORD;
5652
        if (prefixes & PREFIX_REPNZ) {
5653
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5654
        } else if (prefixes & PREFIX_REPZ) {
5655
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5656
        } else {
5657
            gen_scas(s, ot);
5658
            s->cc_op = CC_OP_SUBB + ot;
5659
        }
5660
        break;
5661

    
5662
    case 0xa6: /* cmpsS */
5663
    case 0xa7:
5664
        if ((b & 1) == 0)
5665
            ot = OT_BYTE;
5666
        else
5667
            ot = dflag + OT_WORD;
5668
        if (prefixes & PREFIX_REPNZ) {
5669
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5670
        } else if (prefixes & PREFIX_REPZ) {
5671
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5672
        } else {
5673
            gen_cmps(s, ot);
5674
            s->cc_op = CC_OP_SUBB + ot;
5675
        }
5676
        break;
5677
    case 0x6c: /* insS */
5678
    case 0x6d:
5679
        if ((b & 1) == 0)
5680
            ot = OT_BYTE;
5681
        else
5682
            ot = dflag ? OT_LONG : OT_WORD;
5683
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5684
        gen_op_andl_T0_ffff();
5685
        gen_check_io(s, ot, pc_start - s->cs_base, 
5686
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
5687
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5688
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5689
        } else {
5690
            gen_ins(s, ot);
5691
            if (use_icount) {
5692
                gen_jmp(s, s->pc - s->cs_base);
5693
            }
5694
        }
5695
        break;
5696
    case 0x6e: /* outsS */
5697
    case 0x6f:
5698
        if ((b & 1) == 0)
5699
            ot = OT_BYTE;
5700
        else
5701
            ot = dflag ? OT_LONG : OT_WORD;
5702
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5703
        gen_op_andl_T0_ffff();
5704
        gen_check_io(s, ot, pc_start - s->cs_base,
5705
                     svm_is_rep(prefixes) | 4);
5706
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5707
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5708
        } else {
5709
            gen_outs(s, ot);
5710
            if (use_icount) {
5711
                gen_jmp(s, s->pc - s->cs_base);
5712
            }
5713
        }
5714
        break;
5715

    
5716
        /************************/
5717
        /* port I/O */
5718

    
5719
    case 0xe4:
5720
    case 0xe5:
5721
        if ((b & 1) == 0)
5722
            ot = OT_BYTE;
5723
        else
5724
            ot = dflag ? OT_LONG : OT_WORD;
5725
        val = ldub_code(s->pc++);
5726
        gen_op_movl_T0_im(val);
5727
        gen_check_io(s, ot, pc_start - s->cs_base,
5728
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5729
        if (use_icount)
5730
            gen_io_start();
5731
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5732
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5733
        gen_op_mov_reg_T1(ot, R_EAX);
5734
        if (use_icount) {
5735
            gen_io_end();
5736
            gen_jmp(s, s->pc - s->cs_base);
5737
        }
5738
        break;
5739
    case 0xe6:
5740
    case 0xe7:
5741
        if ((b & 1) == 0)
5742
            ot = OT_BYTE;
5743
        else
5744
            ot = dflag ? OT_LONG : OT_WORD;
5745
        val = ldub_code(s->pc++);
5746
        gen_op_movl_T0_im(val);
5747
        gen_check_io(s, ot, pc_start - s->cs_base,
5748
                     svm_is_rep(prefixes));
5749
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5750

    
5751
        if (use_icount)
5752
            gen_io_start();
5753
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5754
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5755
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5756
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5757
        if (use_icount) {
5758
            gen_io_end();
5759
            gen_jmp(s, s->pc - s->cs_base);
5760
        }
5761
        break;
5762
    case 0xec:
5763
    case 0xed:
5764
        if ((b & 1) == 0)
5765
            ot = OT_BYTE;
5766
        else
5767
            ot = dflag ? OT_LONG : OT_WORD;
5768
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5769
        gen_op_andl_T0_ffff();
5770
        gen_check_io(s, ot, pc_start - s->cs_base,
5771
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5772
        if (use_icount)
5773
            gen_io_start();
5774
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5775
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5776
        gen_op_mov_reg_T1(ot, R_EAX);
5777
        if (use_icount) {
5778
            gen_io_end();
5779
            gen_jmp(s, s->pc - s->cs_base);
5780
        }
5781
        break;
5782
    case 0xee:
5783
    case 0xef:
5784
        if ((b & 1) == 0)
5785
            ot = OT_BYTE;
5786
        else
5787
            ot = dflag ? OT_LONG : OT_WORD;
5788
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5789
        gen_op_andl_T0_ffff();
5790
        gen_check_io(s, ot, pc_start - s->cs_base,
5791
                     svm_is_rep(prefixes));
5792
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5793

    
5794
        if (use_icount)
5795
            gen_io_start();
5796
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5797
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5798
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5799
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5800
        if (use_icount) {
5801
            gen_io_end();
5802
            gen_jmp(s, s->pc - s->cs_base);
5803
        }
5804
        break;
5805

    
5806
        /************************/
5807
        /* control */
5808
    case 0xc2: /* ret im */
5809
        val = ldsw_code(s->pc);
5810
        s->pc += 2;
5811
        gen_pop_T0(s);
5812
        if (CODE64(s) && s->dflag)
5813
            s->dflag = 2;
5814
        gen_stack_update(s, val + (2 << s->dflag));
5815
        if (s->dflag == 0)
5816
            gen_op_andl_T0_ffff();
5817
        gen_op_jmp_T0();
5818
        gen_eob(s);
5819
        break;
5820
    case 0xc3: /* ret */
5821
        gen_pop_T0(s);
5822
        gen_pop_update(s);
5823
        if (s->dflag == 0)
5824
            gen_op_andl_T0_ffff();
5825
        gen_op_jmp_T0();
5826
        gen_eob(s);
5827
        break;
5828
    case 0xca: /* lret im */
5829
        val = ldsw_code(s->pc);
5830
        s->pc += 2;
5831
    do_lret:
5832
        if (s->pe && !s->vm86) {
5833
            if (s->cc_op != CC_OP_DYNAMIC)
5834
                gen_op_set_cc_op(s->cc_op);
5835
            gen_jmp_im(pc_start - s->cs_base);
5836
            tcg_gen_helper_0_2(helper_lret_protected,
5837
                               tcg_const_i32(s->dflag), 
5838
                               tcg_const_i32(val));
5839
        } else {
5840
            gen_stack_A0(s);
5841
            /* pop offset */
5842
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5843
            if (s->dflag == 0)
5844
                gen_op_andl_T0_ffff();
5845
            /* NOTE: keeping EIP updated is not a problem in case of
5846
               exception */
5847
            gen_op_jmp_T0();
5848
            /* pop selector */
5849
            gen_op_addl_A0_im(2 << s->dflag);
5850
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5851
            gen_op_movl_seg_T0_vm(R_CS);
5852
            /* add stack offset */
5853
            gen_stack_update(s, val + (4 << s->dflag));
5854
        }
5855
        gen_eob(s);
5856
        break;
5857
    case 0xcb: /* lret */
5858
        val = 0;
5859
        goto do_lret;
5860
    case 0xcf: /* iret */
5861
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
5862
        if (!s->pe) {
5863
            /* real mode */
5864
            tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5865
            s->cc_op = CC_OP_EFLAGS;
5866
        } else if (s->vm86) {
5867
            if (s->iopl != 3) {
5868
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5869
            } else {
5870
                tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5871
                s->cc_op = CC_OP_EFLAGS;
5872
            }
5873
        } else {
5874
            if (s->cc_op != CC_OP_DYNAMIC)
5875
                gen_op_set_cc_op(s->cc_op);
5876
            gen_jmp_im(pc_start - s->cs_base);
5877
            tcg_gen_helper_0_2(helper_iret_protected,
5878
                               tcg_const_i32(s->dflag), 
5879
                               tcg_const_i32(s->pc - s->cs_base));
5880
            s->cc_op = CC_OP_EFLAGS;
5881
        }
5882
        gen_eob(s);
5883
        break;
5884
    case 0xe8: /* call im */
5885
        {
5886
            if (dflag)
5887
                tval = (int32_t)insn_get(s, OT_LONG);
5888
            else
5889
                tval = (int16_t)insn_get(s, OT_WORD);
5890
            next_eip = s->pc - s->cs_base;
5891
            tval += next_eip;
5892
            if (s->dflag == 0)
5893
                tval &= 0xffff;
5894
            gen_movtl_T0_im(next_eip);
5895
            gen_push_T0(s);
5896
            gen_jmp(s, tval);
5897
        }
5898
        break;
5899
    case 0x9a: /* lcall im */
5900
        {
5901
            unsigned int selector, offset;
5902

    
5903
            if (CODE64(s))
5904
                goto illegal_op;
5905
            ot = dflag ? OT_LONG : OT_WORD;
5906
            offset = insn_get(s, ot);
5907
            selector = insn_get(s, OT_WORD);
5908

    
5909
            gen_op_movl_T0_im(selector);
5910
            gen_op_movl_T1_imu(offset);
5911
        }
5912
        goto do_lcall;
5913
    case 0xe9: /* jmp im */
5914
        if (dflag)
5915
            tval = (int32_t)insn_get(s, OT_LONG);
5916
        else
5917
            tval = (int16_t)insn_get(s, OT_WORD);
5918
        tval += s->pc - s->cs_base;
5919
        if (s->dflag == 0)
5920
            tval &= 0xffff;
5921
        gen_jmp(s, tval);
5922
        break;
5923
    case 0xea: /* ljmp im */
5924
        {
5925
            unsigned int selector, offset;
5926

    
5927
            if (CODE64(s))
5928
                goto illegal_op;
5929
            ot = dflag ? OT_LONG : OT_WORD;
5930
            offset = insn_get(s, ot);
5931
            selector = insn_get(s, OT_WORD);
5932

    
5933
            gen_op_movl_T0_im(selector);
5934
            gen_op_movl_T1_imu(offset);
5935
        }
5936
        goto do_ljmp;
5937
    case 0xeb: /* jmp Jb */
5938
        tval = (int8_t)insn_get(s, OT_BYTE);
5939
        tval += s->pc - s->cs_base;
5940
        if (s->dflag == 0)
5941
            tval &= 0xffff;
5942
        gen_jmp(s, tval);
5943
        break;
5944
    case 0x70 ... 0x7f: /* jcc Jb */
5945
        tval = (int8_t)insn_get(s, OT_BYTE);
5946
        goto do_jcc;
5947
    case 0x180 ... 0x18f: /* jcc Jv */
5948
        if (dflag) {
5949
            tval = (int32_t)insn_get(s, OT_LONG);
5950
        } else {
5951
            tval = (int16_t)insn_get(s, OT_WORD);
5952
        }
5953
    do_jcc:
5954
        next_eip = s->pc - s->cs_base;
5955
        tval += next_eip;
5956
        if (s->dflag == 0)
5957
            tval &= 0xffff;
5958
        gen_jcc(s, b, tval, next_eip);
5959
        break;
5960

    
5961
    case 0x190 ... 0x19f: /* setcc Gv */
5962
        modrm = ldub_code(s->pc++);
5963
        gen_setcc(s, b);
5964
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5965
        break;
5966
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
5967
        {
5968
            int l1;
5969
            TCGv t0;
5970

    
5971
            ot = dflag + OT_WORD;
5972
            modrm = ldub_code(s->pc++);
5973
            reg = ((modrm >> 3) & 7) | rex_r;
5974
            mod = (modrm >> 6) & 3;
5975
            t0 = tcg_temp_local_new(TCG_TYPE_TL);
5976
            if (mod != 3) {
5977
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5978
                gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
5979
            } else {
5980
                rm = (modrm & 7) | REX_B(s);
5981
                gen_op_mov_v_reg(ot, t0, rm);
5982
            }
5983
#ifdef TARGET_X86_64
5984
            if (ot == OT_LONG) {
5985
                /* XXX: specific Intel behaviour ? */
5986
                l1 = gen_new_label();
5987
                gen_jcc1(s, s->cc_op, b ^ 1, l1);
5988
                tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
5989
                gen_set_label(l1);
5990
                tcg_gen_movi_tl(cpu_tmp0, 0);
5991
                tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
5992
            } else
5993
#endif
5994
            {
5995
                l1 = gen_new_label();
5996
                gen_jcc1(s, s->cc_op, b ^ 1, l1);
5997
                gen_op_mov_reg_v(ot, reg, t0);
5998
                gen_set_label(l1);
5999
            }
6000
            tcg_temp_free(t0);
6001
        }
6002
        break;
6003

    
6004
        /************************/
6005
        /* flags */
6006
    case 0x9c: /* pushf */
6007
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
6008
        if (s->vm86 && s->iopl != 3) {
6009
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6010
        } else {
6011
            if (s->cc_op != CC_OP_DYNAMIC)
6012
                gen_op_set_cc_op(s->cc_op);
6013
            tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
6014
            gen_push_T0(s);
6015
        }
6016
        break;
6017
    case 0x9d: /* popf */
6018
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
6019
        if (s->vm86 && s->iopl != 3) {
6020
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6021
        } else {
6022
            gen_pop_T0(s);
6023
            if (s->cpl == 0) {
6024
                if (s->dflag) {
6025
                    tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6026
                                       tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
6027
                } else {
6028
                    tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6029
                                       tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
6030
                }
6031
            } else {
6032
                if (s->cpl <= s->iopl) {
6033
                    if (s->dflag) {
6034
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6035
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
6036
                    } else {
6037
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6038
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
6039
                    }
6040
                } else {
6041
                    if (s->dflag) {
6042
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6043
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
6044
                    } else {
6045
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6046
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
6047
                    }
6048
                }
6049
            }
6050
            gen_pop_update(s);
6051
            s->cc_op = CC_OP_EFLAGS;
6052
            /* abort translation because TF flag may change */
6053
            gen_jmp_im(s->pc - s->cs_base);
6054
            gen_eob(s);
6055
        }
6056
        break;
6057
    case 0x9e: /* sahf */
6058
        if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
6059
            goto illegal_op;
6060
        gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
6061
        if (s->cc_op != CC_OP_DYNAMIC)
6062
            gen_op_set_cc_op(s->cc_op);
6063
        gen_compute_eflags(cpu_cc_src);
6064
        tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
6065
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
6066
        tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
6067
        s->cc_op = CC_OP_EFLAGS;
6068
        break;
6069
    case 0x9f: /* lahf */
6070
        if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
6071
            goto illegal_op;
6072
        if (s->cc_op != CC_OP_DYNAMIC)
6073
            gen_op_set_cc_op(s->cc_op);
6074
        gen_compute_eflags(cpu_T[0]);
6075
        /* Note: gen_compute_eflags() only gives the condition codes */
6076
        tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
6077
        gen_op_mov_reg_T0(OT_BYTE, R_AH);
6078
        break;
6079
    case 0xf5: /* cmc */
6080
        if (s->cc_op != CC_OP_DYNAMIC)
6081
            gen_op_set_cc_op(s->cc_op);
6082
        gen_compute_eflags(cpu_cc_src);
6083
        tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
6084
        s->cc_op = CC_OP_EFLAGS;
6085
        break;
6086
    case 0xf8: /* clc */
6087
        if (s->cc_op != CC_OP_DYNAMIC)
6088
            gen_op_set_cc_op(s->cc_op);
6089
        gen_compute_eflags(cpu_cc_src);
6090
        tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
6091
        s->cc_op = CC_OP_EFLAGS;
6092
        break;
6093
    case 0xf9: /* stc */
6094
        if (s->cc_op != CC_OP_DYNAMIC)
6095
            gen_op_set_cc_op(s->cc_op);
6096
        gen_compute_eflags(cpu_cc_src);
6097
        tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
6098
        s->cc_op = CC_OP_EFLAGS;
6099
        break;
6100
    case 0xfc: /* cld */
6101
        tcg_gen_movi_i32(cpu_tmp2_i32, 1);
6102
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
6103
        break;
6104
    case 0xfd: /* std */
6105
        tcg_gen_movi_i32(cpu_tmp2_i32, -1);
6106
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
6107
        break;
6108

    
6109
        /************************/
6110
        /* bit operations */
6111
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
6112
        ot = dflag + OT_WORD;
6113
        modrm = ldub_code(s->pc++);
6114
        op = (modrm >> 3) & 7;
6115
        mod = (modrm >> 6) & 3;
6116
        rm = (modrm & 7) | REX_B(s);
6117
        if (mod != 3) {
6118
            s->rip_offset = 1;
6119
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6120
            gen_op_ld_T0_A0(ot + s->mem_index);
6121
        } else {
6122
            gen_op_mov_TN_reg(ot, 0, rm);
6123
        }
6124
        /* load shift */
6125
        val = ldub_code(s->pc++);
6126
        gen_op_movl_T1_im(val);
6127
        if (op < 4)
6128
            goto illegal_op;
6129
        op -= 4;
6130
        goto bt_op;
6131
    case 0x1a3: /* bt Gv, Ev */
6132
        op = 0;
6133
        goto do_btx;
6134
    case 0x1ab: /* bts */
6135
        op = 1;
6136
        goto do_btx;
6137
    case 0x1b3: /* btr */
6138
        op = 2;
6139
        goto do_btx;
6140
    case 0x1bb: /* btc */
6141
        op = 3;
6142
    do_btx:
6143
        ot = dflag + OT_WORD;
6144
        modrm = ldub_code(s->pc++);
6145
        reg = ((modrm >> 3) & 7) | rex_r;
6146
        mod = (modrm >> 6) & 3;
6147
        rm = (modrm & 7) | REX_B(s);
6148
        gen_op_mov_TN_reg(OT_LONG, 1, reg);
6149
        if (mod != 3) {
6150
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6151
            /* specific case: we need to add a displacement */
6152
            gen_exts(ot, cpu_T[1]);
6153
            tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
6154
            tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
6155
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
6156
            gen_op_ld_T0_A0(ot + s->mem_index);
6157
        } else {
6158
            gen_op_mov_TN_reg(ot, 0, rm);
6159
        }
6160
    bt_op:
6161
        tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
6162
        switch(op) {
6163
        case 0:
6164
            tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
6165
            tcg_gen_movi_tl(cpu_cc_dst, 0);
6166
            break;
6167
        case 1:
6168
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6169
            tcg_gen_movi_tl(cpu_tmp0, 1);
6170
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6171
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6172
            break;
6173
        case 2:
6174
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6175
            tcg_gen_movi_tl(cpu_tmp0, 1);
6176
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6177
            tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
6178
            tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6179
            break;
6180
        default:
6181
        case 3:
6182
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6183
            tcg_gen_movi_tl(cpu_tmp0, 1);
6184
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6185
            tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6186
            break;
6187
        }
6188
        s->cc_op = CC_OP_SARB + ot;
6189
        if (op != 0) {
6190
            if (mod != 3)
6191
                gen_op_st_T0_A0(ot + s->mem_index);
6192
            else
6193
                gen_op_mov_reg_T0(ot, rm);
6194
            tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
6195
            tcg_gen_movi_tl(cpu_cc_dst, 0);
6196
        }
6197
        break;
6198
    case 0x1bc: /* bsf */
6199
    case 0x1bd: /* bsr */
6200
        {
6201
            int label1;
6202
            TCGv t0;
6203

    
6204
            ot = dflag + OT_WORD;
6205
            modrm = ldub_code(s->pc++);
6206
            reg = ((modrm >> 3) & 7) | rex_r;
6207
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6208
            gen_extu(ot, cpu_T[0]);
6209
            label1 = gen_new_label();
6210
            tcg_gen_movi_tl(cpu_cc_dst, 0);
6211
            t0 = tcg_temp_local_new(TCG_TYPE_TL);
6212
            tcg_gen_mov_tl(t0, cpu_T[0]);
6213
            tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
6214
            if (b & 1) {
6215
                tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
6216
            } else {
6217
                tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
6218
            }
6219
            gen_op_mov_reg_T0(ot, reg);
6220
            tcg_gen_movi_tl(cpu_cc_dst, 1);
6221
            gen_set_label(label1);
6222
            tcg_gen_discard_tl(cpu_cc_src);
6223
            s->cc_op = CC_OP_LOGICB + ot;
6224
            tcg_temp_free(t0);
6225
        }
6226
        break;
6227
        /************************/
6228
        /* bcd */
6229
    case 0x27: /* daa */
6230
        if (CODE64(s))
6231
            goto illegal_op;
6232
        if (s->cc_op != CC_OP_DYNAMIC)
6233
            gen_op_set_cc_op(s->cc_op);
6234
        tcg_gen_helper_0_0(helper_daa);
6235
        s->cc_op = CC_OP_EFLAGS;
6236
        break;
6237
    case 0x2f: /* das */
6238
        if (CODE64(s))
6239
            goto illegal_op;
6240
        if (s->cc_op != CC_OP_DYNAMIC)
6241
            gen_op_set_cc_op(s->cc_op);
6242
        tcg_gen_helper_0_0(helper_das);
6243
        s->cc_op = CC_OP_EFLAGS;
6244
        break;
6245
    case 0x37: /* aaa */
6246
        if (CODE64(s))
6247
            goto illegal_op;
6248
        if (s->cc_op != CC_OP_DYNAMIC)
6249
            gen_op_set_cc_op(s->cc_op);
6250
        tcg_gen_helper_0_0(helper_aaa);
6251
        s->cc_op = CC_OP_EFLAGS;
6252
        break;
6253
    case 0x3f: /* aas */
6254
        if (CODE64(s))
6255
            goto illegal_op;
6256
        if (s->cc_op != CC_OP_DYNAMIC)
6257
            gen_op_set_cc_op(s->cc_op);
6258
        tcg_gen_helper_0_0(helper_aas);
6259
        s->cc_op = CC_OP_EFLAGS;
6260
        break;
6261
    case 0xd4: /* aam */
6262
        if (CODE64(s))
6263
            goto illegal_op;
6264
        val = ldub_code(s->pc++);
6265
        if (val == 0) {
6266
            gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
6267
        } else {
6268
            tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
6269
            s->cc_op = CC_OP_LOGICB;
6270
        }
6271
        break;
6272
    case 0xd5: /* aad */
6273
        if (CODE64(s))
6274
            goto illegal_op;
6275
        val = ldub_code(s->pc++);
6276
        tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
6277
        s->cc_op = CC_OP_LOGICB;
6278
        break;
6279
        /************************/
6280
        /* misc */
6281
    case 0x90: /* nop */
6282
        /* XXX: xchg + rex handling */
6283
        /* XXX: correct lock test for all insn */
6284
        if (prefixes & PREFIX_LOCK)
6285
            goto illegal_op;
6286
        if (prefixes & PREFIX_REPZ) {
6287
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
6288
        }
6289
        break;
6290
    case 0x9b: /* fwait */
6291
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
6292
            (HF_MP_MASK | HF_TS_MASK)) {
6293
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6294
        } else {
6295
            if (s->cc_op != CC_OP_DYNAMIC)
6296
                gen_op_set_cc_op(s->cc_op);
6297
            gen_jmp_im(pc_start - s->cs_base);
6298
            tcg_gen_helper_0_0(helper_fwait);
6299
        }
6300
        break;
6301
    case 0xcc: /* int3 */
6302
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
6303
        break;
6304
    case 0xcd: /* int N */
6305
        val = ldub_code(s->pc++);
6306
        if (s->vm86 && s->iopl != 3) {
6307
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6308
        } else {
6309
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
6310
        }
6311
        break;
6312
    case 0xce: /* into */
6313
        if (CODE64(s))
6314
            goto illegal_op;
6315
        if (s->cc_op != CC_OP_DYNAMIC)
6316
            gen_op_set_cc_op(s->cc_op);
6317
        gen_jmp_im(pc_start - s->cs_base);
6318
        tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
6319
        break;
6320
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
6321
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
6322
#if 1
6323
        gen_debug(s, pc_start - s->cs_base);
6324
#else
6325
        /* start debug */
6326
        tb_flush(cpu_single_env);
6327
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
6328
#endif
6329
        break;
6330
    case 0xfa: /* cli */
6331
        if (!s->vm86) {
6332
            if (s->cpl <= s->iopl) {
6333
                tcg_gen_helper_0_0(helper_cli);
6334
            } else {
6335
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6336
            }
6337
        } else {
6338
            if (s->iopl == 3) {
6339
                tcg_gen_helper_0_0(helper_cli);
6340
            } else {
6341
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6342
            }
6343
        }
6344
        break;
6345
    case 0xfb: /* sti */
6346
        if (!s->vm86) {
6347
            if (s->cpl <= s->iopl) {
6348
            gen_sti:
6349
                tcg_gen_helper_0_0(helper_sti);
6350
                /* interruptions are enabled only the first insn after sti */
6351
                /* If several instructions disable interrupts, only the
6352
                   _first_ does it */
6353
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
6354
                    tcg_gen_helper_0_0(helper_set_inhibit_irq);
6355
                /* give a chance to handle pending irqs */
6356
                gen_jmp_im(s->pc - s->cs_base);
6357
                gen_eob(s);
6358
            } else {
6359
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6360
            }
6361
        } else {
6362
            if (s->iopl == 3) {
6363
                goto gen_sti;
6364
            } else {
6365
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6366
            }
6367
        }
6368
        break;
6369
    case 0x62: /* bound */
6370
        if (CODE64(s))
6371
            goto illegal_op;
6372
        ot = dflag ? OT_LONG : OT_WORD;
6373
        modrm = ldub_code(s->pc++);
6374
        reg = (modrm >> 3) & 7;
6375
        mod = (modrm >> 6) & 3;
6376
        if (mod == 3)
6377
            goto illegal_op;
6378
        gen_op_mov_TN_reg(ot, 0, reg);
6379
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6380
        gen_jmp_im(pc_start - s->cs_base);
6381
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6382
        if (ot == OT_WORD)
6383
            tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
6384
        else
6385
            tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
6386
        break;
6387
    case 0x1c8 ... 0x1cf: /* bswap reg */
6388
        reg = (b & 7) | REX_B(s);
6389
#ifdef TARGET_X86_64
6390
        if (dflag == 2) {
6391
            gen_op_mov_TN_reg(OT_QUAD, 0, reg);
6392
            tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
6393
            gen_op_mov_reg_T0(OT_QUAD, reg);
6394
        } else
6395
        {
6396
            TCGv tmp0;
6397
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
6398
            
6399
            tmp0 = tcg_temp_new(TCG_TYPE_I32);
6400
            tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
6401
            tcg_gen_bswap_i32(tmp0, tmp0);
6402
            tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
6403
            gen_op_mov_reg_T0(OT_LONG, reg);
6404
        }
6405
#else
6406
        {
6407
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
6408
            tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
6409
            gen_op_mov_reg_T0(OT_LONG, reg);
6410
        }
6411
#endif
6412
        break;
6413
    case 0xd6: /* salc */
6414
        if (CODE64(s))
6415
            goto illegal_op;
6416
        if (s->cc_op != CC_OP_DYNAMIC)
6417
            gen_op_set_cc_op(s->cc_op);
6418
        gen_compute_eflags_c(cpu_T[0]);
6419
        tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
6420
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
6421
        break;
6422
    case 0xe0: /* loopnz */
6423
    case 0xe1: /* loopz */
6424
    case 0xe2: /* loop */
6425
    case 0xe3: /* jecxz */
6426
        {
6427
            int l1, l2, l3;
6428

    
6429
            tval = (int8_t)insn_get(s, OT_BYTE);
6430
            next_eip = s->pc - s->cs_base;
6431
            tval += next_eip;
6432
            if (s->dflag == 0)
6433
                tval &= 0xffff;
6434

    
6435
            l1 = gen_new_label();
6436
            l2 = gen_new_label();
6437
            l3 = gen_new_label();
6438
            b &= 3;
6439
            switch(b) {
6440
            case 0: /* loopnz */
6441
            case 1: /* loopz */
6442
                if (s->cc_op != CC_OP_DYNAMIC)
6443
                    gen_op_set_cc_op(s->cc_op);
6444
                gen_op_add_reg_im(s->aflag, R_ECX, -1);
6445
                gen_op_jz_ecx(s->aflag, l3);
6446
                gen_compute_eflags(cpu_tmp0);
6447
                tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
6448
                if (b == 0) {
6449
                    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
6450
                } else {
6451
                    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
6452
                }
6453
                break;
6454
            case 2: /* loop */
6455
                gen_op_add_reg_im(s->aflag, R_ECX, -1);
6456
                gen_op_jnz_ecx(s->aflag, l1);
6457
                break;
6458
            default:
6459
            case 3: /* jcxz */
6460
                gen_op_jz_ecx(s->aflag, l1);
6461
                break;
6462
            }
6463

    
6464
            gen_set_label(l3);
6465
            gen_jmp_im(next_eip);
6466
            tcg_gen_br(l2);
6467

    
6468
            gen_set_label(l1);
6469
            gen_jmp_im(tval);
6470
            gen_set_label(l2);
6471
            gen_eob(s);
6472
        }
6473
        break;
6474
    case 0x130: /* wrmsr */
6475
    case 0x132: /* rdmsr */
6476
        if (s->cpl != 0) {
6477
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6478
        } else {
6479
            if (s->cc_op != CC_OP_DYNAMIC)
6480
                gen_op_set_cc_op(s->cc_op);
6481
            gen_jmp_im(pc_start - s->cs_base);
6482
            if (b & 2) {
6483
                tcg_gen_helper_0_0(helper_rdmsr);
6484
            } else {
6485
                tcg_gen_helper_0_0(helper_wrmsr);
6486
            }
6487
        }
6488
        break;
6489
    case 0x131: /* rdtsc */
6490
        if (s->cc_op != CC_OP_DYNAMIC)
6491
            gen_op_set_cc_op(s->cc_op);
6492
        gen_jmp_im(pc_start - s->cs_base);
6493
        if (use_icount)
6494
            gen_io_start();
6495
        tcg_gen_helper_0_0(helper_rdtsc);
6496
        if (use_icount) {
6497
            gen_io_end();
6498
            gen_jmp(s, s->pc - s->cs_base);
6499
        }
6500
        break;
6501
    case 0x133: /* rdpmc */
6502
        if (s->cc_op != CC_OP_DYNAMIC)
6503
            gen_op_set_cc_op(s->cc_op);
6504
        gen_jmp_im(pc_start - s->cs_base);
6505
        tcg_gen_helper_0_0(helper_rdpmc);
6506
        break;
6507
    case 0x134: /* sysenter */
6508
        if (CODE64(s))
6509
            goto illegal_op;
6510
        if (!s->pe) {
6511
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6512
        } else {
6513
            if (s->cc_op != CC_OP_DYNAMIC) {
6514
                gen_op_set_cc_op(s->cc_op);
6515
                s->cc_op = CC_OP_DYNAMIC;
6516
            }
6517
            gen_jmp_im(pc_start - s->cs_base);
6518
            tcg_gen_helper_0_0(helper_sysenter);
6519
            gen_eob(s);
6520
        }
6521
        break;
6522
    case 0x135: /* sysexit */
6523
        if (CODE64(s))
6524
            goto illegal_op;
6525
        if (!s->pe) {
6526
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6527
        } else {
6528
            if (s->cc_op != CC_OP_DYNAMIC) {
6529
                gen_op_set_cc_op(s->cc_op);
6530
                s->cc_op = CC_OP_DYNAMIC;
6531
            }
6532
            gen_jmp_im(pc_start - s->cs_base);
6533
            tcg_gen_helper_0_0(helper_sysexit);
6534
            gen_eob(s);
6535
        }
6536
        break;
6537
#ifdef TARGET_X86_64
6538
    case 0x105: /* syscall */
6539
        /* XXX: is it usable in real mode ? */
6540
        if (s->cc_op != CC_OP_DYNAMIC) {
6541
            gen_op_set_cc_op(s->cc_op);
6542
            s->cc_op = CC_OP_DYNAMIC;
6543
        }
6544
        gen_jmp_im(pc_start - s->cs_base);
6545
        tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
6546
        gen_eob(s);
6547
        break;
6548
    case 0x107: /* sysret */
6549
        if (!s->pe) {
6550
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6551
        } else {
6552
            if (s->cc_op != CC_OP_DYNAMIC) {
6553
                gen_op_set_cc_op(s->cc_op);
6554
                s->cc_op = CC_OP_DYNAMIC;
6555
            }
6556
            gen_jmp_im(pc_start - s->cs_base);
6557
            tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
6558
            /* condition codes are modified only in long mode */
6559
            if (s->lma)
6560
                s->cc_op = CC_OP_EFLAGS;
6561
            gen_eob(s);
6562
        }
6563
        break;
6564
#endif
6565
    case 0x1a2: /* cpuid */
6566
        if (s->cc_op != CC_OP_DYNAMIC)
6567
            gen_op_set_cc_op(s->cc_op);
6568
        gen_jmp_im(pc_start - s->cs_base);
6569
        tcg_gen_helper_0_0(helper_cpuid);
6570
        break;
6571
    case 0xf4: /* hlt */
6572
        if (s->cpl != 0) {
6573
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6574
        } else {
6575
            if (s->cc_op != CC_OP_DYNAMIC)
6576
                gen_op_set_cc_op(s->cc_op);
6577
            gen_jmp_im(pc_start - s->cs_base);
6578
            tcg_gen_helper_0_1(helper_hlt, tcg_const_i32(s->pc - pc_start));
6579
            s->is_jmp = 3;
6580
        }
6581
        break;
6582
    case 0x100:
6583
        modrm = ldub_code(s->pc++);
6584
        mod = (modrm >> 6) & 3;
6585
        op = (modrm >> 3) & 7;
6586
        switch(op) {
6587
        case 0: /* sldt */
6588
            if (!s->pe || s->vm86)
6589
                goto illegal_op;
6590
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
6591
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
6592
            ot = OT_WORD;
6593
            if (mod == 3)
6594
                ot += s->dflag;
6595
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6596
            break;
6597
        case 2: /* lldt */
6598
            if (!s->pe || s->vm86)
6599
                goto illegal_op;
6600
            if (s->cpl != 0) {
6601
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6602
            } else {
6603
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
6604
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6605
                gen_jmp_im(pc_start - s->cs_base);
6606
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6607
                tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
6608
            }
6609
            break;
6610
        case 1: /* str */
6611
            if (!s->pe || s->vm86)
6612
                goto illegal_op;
6613
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
6614
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
6615
            ot = OT_WORD;
6616
            if (mod == 3)
6617
                ot += s->dflag;
6618
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6619
            break;
6620
        case 3: /* ltr */
6621
            if (!s->pe || s->vm86)
6622
                goto illegal_op;
6623
            if (s->cpl != 0) {
6624
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6625
            } else {
6626
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
6627
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6628
                gen_jmp_im(pc_start - s->cs_base);
6629
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6630
                tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
6631
            }
6632
            break;
6633
        case 4: /* verr */
6634
        case 5: /* verw */
6635
            if (!s->pe || s->vm86)
6636
                goto illegal_op;
6637
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6638
            if (s->cc_op != CC_OP_DYNAMIC)
6639
                gen_op_set_cc_op(s->cc_op);
6640
            if (op == 4)
6641
                tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
6642
            else
6643
                tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
6644
            s->cc_op = CC_OP_EFLAGS;
6645
            break;
6646
        default:
6647
            goto illegal_op;
6648
        }
6649
        break;
6650
    case 0x101:
6651
        modrm = ldub_code(s->pc++);
6652
        mod = (modrm >> 6) & 3;
6653
        op = (modrm >> 3) & 7;
6654
        rm = modrm & 7;
6655
        switch(op) {
6656
        case 0: /* sgdt */
6657
            if (mod == 3)
6658
                goto illegal_op;
6659
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
6660
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6661
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
6662
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
6663
            gen_add_A0_im(s, 2);
6664
            tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
6665
            if (!s->dflag)
6666
                gen_op_andl_T0_im(0xffffff);
6667
            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6668
            break;
6669
        case 1:
6670
            if (mod == 3) {
6671
                switch (rm) {
6672
                case 0: /* monitor */
6673
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6674
                        s->cpl != 0)
6675
                        goto illegal_op;
6676
                    if (s->cc_op != CC_OP_DYNAMIC)
6677
                        gen_op_set_cc_op(s->cc_op);
6678
                    gen_jmp_im(pc_start - s->cs_base);
6679
#ifdef TARGET_X86_64
6680
                    if (s->aflag == 2) {
6681
                        gen_op_movq_A0_reg(R_EAX);
6682
                    } else
6683
#endif
6684
                    {
6685
                        gen_op_movl_A0_reg(R_EAX);
6686
                        if (s->aflag == 0)
6687
                            gen_op_andl_A0_ffff();
6688
                    }
6689
                    gen_add_A0_ds_seg(s);
6690
                    tcg_gen_helper_0_1(helper_monitor, cpu_A0);
6691
                    break;
6692
                case 1: /* mwait */
6693
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6694
                        s->cpl != 0)
6695
                        goto illegal_op;
6696
                    if (s->cc_op != CC_OP_DYNAMIC) {
6697
                        gen_op_set_cc_op(s->cc_op);
6698
                        s->cc_op = CC_OP_DYNAMIC;
6699
                    }
6700
                    gen_jmp_im(pc_start - s->cs_base);
6701
                    tcg_gen_helper_0_1(helper_mwait, tcg_const_i32(s->pc - pc_start));
6702
                    gen_eob(s);
6703
                    break;
6704
                default:
6705
                    goto illegal_op;
6706
                }
6707
            } else { /* sidt */
6708
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
6709
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6710
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
6711
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
6712
                gen_add_A0_im(s, 2);
6713
                tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
6714
                if (!s->dflag)
6715
                    gen_op_andl_T0_im(0xffffff);
6716
                gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6717
            }
6718
            break;
6719
        case 2: /* lgdt */
6720
        case 3: /* lidt */
6721
            if (mod == 3) {
6722
                if (s->cc_op != CC_OP_DYNAMIC)
6723
                    gen_op_set_cc_op(s->cc_op);
6724
                gen_jmp_im(pc_start - s->cs_base);
6725
                switch(rm) {
6726
                case 0: /* VMRUN */
6727
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
6728
                        goto illegal_op;
6729
                    if (s->cpl != 0) {
6730
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6731
                        break;
6732
                    } else {
6733
                        tcg_gen_helper_0_2(helper_vmrun, 
6734
                                           tcg_const_i32(s->aflag),
6735
                                           tcg_const_i32(s->pc - pc_start));
6736
                        tcg_gen_exit_tb(0);
6737
                        s->is_jmp = 3;
6738
                    }
6739
                    break;
6740
                case 1: /* VMMCALL */
6741
                    if (!(s->flags & HF_SVME_MASK))
6742
                        goto illegal_op;
6743
                    tcg_gen_helper_0_0(helper_vmmcall);
6744
                    break;
6745
                case 2: /* VMLOAD */
6746
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
6747
                        goto illegal_op;
6748
                    if (s->cpl != 0) {
6749
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6750
                        break;
6751
                    } else {
6752
                        tcg_gen_helper_0_1(helper_vmload,
6753
                                           tcg_const_i32(s->aflag));
6754
                    }
6755
                    break;
6756
                case 3: /* VMSAVE */
6757
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
6758
                        goto illegal_op;
6759
                    if (s->cpl != 0) {
6760
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6761
                        break;
6762
                    } else {
6763
                        tcg_gen_helper_0_1(helper_vmsave,
6764
                                           tcg_const_i32(s->aflag));
6765
                    }
6766
                    break;
6767
                case 4: /* STGI */
6768
                    if ((!(s->flags & HF_SVME_MASK) &&
6769
                         !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) || 
6770
                        !s->pe)
6771
                        goto illegal_op;
6772
                    if (s->cpl != 0) {
6773
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6774
                        break;
6775
                    } else {
6776
                        tcg_gen_helper_0_0(helper_stgi);
6777
                    }
6778
                    break;
6779
                case 5: /* CLGI */
6780
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
6781
                        goto illegal_op;
6782
                    if (s->cpl != 0) {
6783
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6784
                        break;
6785
                    } else {
6786
                        tcg_gen_helper_0_0(helper_clgi);
6787
                    }
6788
                    break;
6789
                case 6: /* SKINIT */
6790
                    if ((!(s->flags & HF_SVME_MASK) && 
6791
                         !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) || 
6792
                        !s->pe)
6793
                        goto illegal_op;
6794
                    tcg_gen_helper_0_0(helper_skinit);
6795
                    break;
6796
                case 7: /* INVLPGA */
6797
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
6798
                        goto illegal_op;
6799
                    if (s->cpl != 0) {
6800
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6801
                        break;
6802
                    } else {
6803
                        tcg_gen_helper_0_1(helper_invlpga,
6804
                                           tcg_const_i32(s->aflag));
6805
                    }
6806
                    break;
6807
                default:
6808
                    goto illegal_op;
6809
                }
6810
            } else if (s->cpl != 0) {
6811
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6812
            } else {
6813
                gen_svm_check_intercept(s, pc_start,
6814
                                        op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
6815
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6816
                gen_op_ld_T1_A0(OT_WORD + s->mem_index);
6817
                gen_add_A0_im(s, 2);
6818
                gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6819
                if (!s->dflag)
6820
                    gen_op_andl_T0_im(0xffffff);
6821
                if (op == 2) {
6822
                    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
6823
                    tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
6824
                } else {
6825
                    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
6826
                    tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
6827
                }
6828
            }
6829
            break;
6830
        case 4: /* smsw */
6831
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
6832
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
6833
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6834
            break;
6835
        case 6: /* lmsw */
6836
            if (s->cpl != 0) {
6837
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6838
            } else {
6839
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6840
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6841
                tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
6842
                gen_jmp_im(s->pc - s->cs_base);
6843
                gen_eob(s);
6844
            }
6845
            break;
6846
        case 7: /* invlpg */
6847
            if (s->cpl != 0) {
6848
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6849
            } else {
6850
                if (mod == 3) {
6851
#ifdef TARGET_X86_64
6852
                    if (CODE64(s) && rm == 0) {
6853
                        /* swapgs */
6854
                        tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
6855
                        tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
6856
                        tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
6857
                        tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
6858
                    } else
6859
#endif
6860
                    {
6861
                        goto illegal_op;
6862
                    }
6863
                } else {
6864
                    if (s->cc_op != CC_OP_DYNAMIC)
6865
                        gen_op_set_cc_op(s->cc_op);
6866
                    gen_jmp_im(pc_start - s->cs_base);
6867
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6868
                    tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
6869
                    gen_jmp_im(s->pc - s->cs_base);
6870
                    gen_eob(s);
6871
                }
6872
            }
6873
            break;
6874
        default:
6875
            goto illegal_op;
6876
        }
6877
        break;
6878
    case 0x108: /* invd */
6879
    case 0x109: /* wbinvd */
6880
        if (s->cpl != 0) {
6881
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6882
        } else {
6883
            gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
6884
            /* nothing to do */
6885
        }
6886
        break;
6887
    case 0x63: /* arpl or movslS (x86_64) */
6888
#ifdef TARGET_X86_64
6889
        if (CODE64(s)) {
6890
            int d_ot;
6891
            /* d_ot is the size of destination */
6892
            d_ot = dflag + OT_WORD;
6893

    
6894
            modrm = ldub_code(s->pc++);
6895
            reg = ((modrm >> 3) & 7) | rex_r;
6896
            mod = (modrm >> 6) & 3;
6897
            rm = (modrm & 7) | REX_B(s);
6898

    
6899
            if (mod == 3) {
6900
                gen_op_mov_TN_reg(OT_LONG, 0, rm);
6901
                /* sign extend */
6902
                if (d_ot == OT_QUAD)
6903
                    tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
6904
                gen_op_mov_reg_T0(d_ot, reg);
6905
            } else {
6906
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6907
                if (d_ot == OT_QUAD) {
6908
                    gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6909
                } else {
6910
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6911
                }
6912
                gen_op_mov_reg_T0(d_ot, reg);
6913
            }
6914
        } else
6915
#endif
6916
        {
6917
            int label1;
6918
            TCGv t0, t1, t2;
6919

    
6920
            if (!s->pe || s->vm86)
6921
                goto illegal_op;
6922
            t0 = tcg_temp_local_new(TCG_TYPE_TL);
6923
            t1 = tcg_temp_local_new(TCG_TYPE_TL);
6924
            t2 = tcg_temp_local_new(TCG_TYPE_TL);
6925
            ot = OT_WORD;
6926
            modrm = ldub_code(s->pc++);
6927
            reg = (modrm >> 3) & 7;
6928
            mod = (modrm >> 6) & 3;
6929
            rm = modrm & 7;
6930
            if (mod != 3) {
6931
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6932
                gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
6933
            } else {
6934
                gen_op_mov_v_reg(ot, t0, rm);
6935
            }
6936
            gen_op_mov_v_reg(ot, t1, reg);
6937
            tcg_gen_andi_tl(cpu_tmp0, t0, 3);
6938
            tcg_gen_andi_tl(t1, t1, 3);
6939
            tcg_gen_movi_tl(t2, 0);
6940
            label1 = gen_new_label();
6941
            tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
6942
            tcg_gen_andi_tl(t0, t0, ~3);
6943
            tcg_gen_or_tl(t0, t0, t1);
6944
            tcg_gen_movi_tl(t2, CC_Z);
6945
            gen_set_label(label1);
6946
            if (mod != 3) {
6947
                gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
6948
            } else {
6949
                gen_op_mov_reg_v(ot, rm, t0);
6950
            }
6951
            if (s->cc_op != CC_OP_DYNAMIC)
6952
                gen_op_set_cc_op(s->cc_op);
6953
            gen_compute_eflags(cpu_cc_src);
6954
            tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
6955
            tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
6956
            s->cc_op = CC_OP_EFLAGS;
6957
            tcg_temp_free(t0);
6958
            tcg_temp_free(t1);
6959
            tcg_temp_free(t2);
6960
        }
6961
        break;
6962
    case 0x102: /* lar */
6963
    case 0x103: /* lsl */
6964
        {
6965
            int label1;
6966
            TCGv t0;
6967
            if (!s->pe || s->vm86)
6968
                goto illegal_op;
6969
            ot = dflag ? OT_LONG : OT_WORD;
6970
            modrm = ldub_code(s->pc++);
6971
            reg = ((modrm >> 3) & 7) | rex_r;
6972
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6973
            t0 = tcg_temp_local_new(TCG_TYPE_TL);
6974
            if (s->cc_op != CC_OP_DYNAMIC)
6975
                gen_op_set_cc_op(s->cc_op);
6976
            if (b == 0x102)
6977
                tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
6978
            else
6979
                tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
6980
            tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
6981
            label1 = gen_new_label();
6982
            tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
6983
            gen_op_mov_reg_v(ot, reg, t0);
6984
            gen_set_label(label1);
6985
            s->cc_op = CC_OP_EFLAGS;
6986
            tcg_temp_free(t0);
6987
        }
6988
        break;
6989
    case 0x118:
6990
        modrm = ldub_code(s->pc++);
6991
        mod = (modrm >> 6) & 3;
6992
        op = (modrm >> 3) & 7;
6993
        switch(op) {
6994
        case 0: /* prefetchnta */
6995
        case 1: /* prefetchnt0 */
6996
        case 2: /* prefetchnt0 */
6997
        case 3: /* prefetchnt0 */
6998
            if (mod == 3)
6999
                goto illegal_op;
7000
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7001
            /* nothing more to do */
7002
            break;
7003
        default: /* nop (multi byte) */
7004
            gen_nop_modrm(s, modrm);
7005
            break;
7006
        }
7007
        break;
7008
    case 0x119 ... 0x11f: /* nop (multi byte) */
7009
        modrm = ldub_code(s->pc++);
7010
        gen_nop_modrm(s, modrm);
7011
        break;
7012
    case 0x120: /* mov reg, crN */
7013
    case 0x122: /* mov crN, reg */
7014
        if (s->cpl != 0) {
7015
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7016
        } else {
7017
            modrm = ldub_code(s->pc++);
7018
            if ((modrm & 0xc0) != 0xc0)
7019
                goto illegal_op;
7020
            rm = (modrm & 7) | REX_B(s);
7021
            reg = ((modrm >> 3) & 7) | rex_r;
7022
            if (CODE64(s))
7023
                ot = OT_QUAD;
7024
            else
7025
                ot = OT_LONG;
7026
            switch(reg) {
7027
            case 0:
7028
            case 2:
7029
            case 3:
7030
            case 4:
7031
            case 8:
7032
                if (s->cc_op != CC_OP_DYNAMIC)
7033
                    gen_op_set_cc_op(s->cc_op);
7034
                gen_jmp_im(pc_start - s->cs_base);
7035
                if (b & 2) {
7036
                    gen_op_mov_TN_reg(ot, 0, rm);
7037
                    tcg_gen_helper_0_2(helper_write_crN, 
7038
                                       tcg_const_i32(reg), cpu_T[0]);
7039
                    gen_jmp_im(s->pc - s->cs_base);
7040
                    gen_eob(s);
7041
                } else {
7042
                    tcg_gen_helper_1_1(helper_read_crN, 
7043
                                       cpu_T[0], tcg_const_i32(reg));
7044
                    gen_op_mov_reg_T0(ot, rm);
7045
                }
7046
                break;
7047
            default:
7048
                goto illegal_op;
7049
            }
7050
        }
7051
        break;
7052
    case 0x121: /* mov reg, drN */
7053
    case 0x123: /* mov drN, reg */
7054
        if (s->cpl != 0) {
7055
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7056
        } else {
7057
            modrm = ldub_code(s->pc++);
7058
            if ((modrm & 0xc0) != 0xc0)
7059
                goto illegal_op;
7060
            rm = (modrm & 7) | REX_B(s);
7061
            reg = ((modrm >> 3) & 7) | rex_r;
7062
            if (CODE64(s))
7063
                ot = OT_QUAD;
7064
            else
7065
                ot = OT_LONG;
7066
            /* XXX: do it dynamically with CR4.DE bit */
7067
            if (reg == 4 || reg == 5 || reg >= 8)
7068
                goto illegal_op;
7069
            if (b & 2) {
7070
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
7071
                gen_op_mov_TN_reg(ot, 0, rm);
7072
                tcg_gen_helper_0_2(helper_movl_drN_T0,
7073
                                   tcg_const_i32(reg), cpu_T[0]);
7074
                gen_jmp_im(s->pc - s->cs_base);
7075
                gen_eob(s);
7076
            } else {
7077
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
7078
                tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
7079
                gen_op_mov_reg_T0(ot, rm);
7080
            }
7081
        }
7082
        break;
7083
    case 0x106: /* clts */
7084
        if (s->cpl != 0) {
7085
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7086
        } else {
7087
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7088
            tcg_gen_helper_0_0(helper_clts);
7089
            /* abort block because static cpu state changed */
7090
            gen_jmp_im(s->pc - s->cs_base);
7091
            gen_eob(s);
7092
        }
7093
        break;
7094
    /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3 support */
7095
    case 0x1c3: /* MOVNTI reg, mem */
7096
        if (!(s->cpuid_features & CPUID_SSE2))
7097
            goto illegal_op;
7098
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
7099
        modrm = ldub_code(s->pc++);
7100
        mod = (modrm >> 6) & 3;
7101
        if (mod == 3)
7102
            goto illegal_op;
7103
        reg = ((modrm >> 3) & 7) | rex_r;
7104
        /* generate a generic store */
7105
        gen_ldst_modrm(s, modrm, ot, reg, 1);
7106
        break;
7107
    case 0x1ae:
7108
        modrm = ldub_code(s->pc++);
7109
        mod = (modrm >> 6) & 3;
7110
        op = (modrm >> 3) & 7;
7111
        switch(op) {
7112
        case 0: /* fxsave */
7113
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
7114
                (s->flags & HF_EM_MASK))
7115
                goto illegal_op;
7116
            if (s->flags & HF_TS_MASK) {
7117
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7118
                break;
7119
            }
7120
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7121
            if (s->cc_op != CC_OP_DYNAMIC)
7122
                gen_op_set_cc_op(s->cc_op);
7123
            gen_jmp_im(pc_start - s->cs_base);
7124
            tcg_gen_helper_0_2(helper_fxsave, 
7125
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
7126
            break;
7127
        case 1: /* fxrstor */
7128
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
7129
                (s->flags & HF_EM_MASK))
7130
                goto illegal_op;
7131
            if (s->flags & HF_TS_MASK) {
7132
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7133
                break;
7134
            }
7135
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7136
            if (s->cc_op != CC_OP_DYNAMIC)
7137
                gen_op_set_cc_op(s->cc_op);
7138
            gen_jmp_im(pc_start - s->cs_base);
7139
            tcg_gen_helper_0_2(helper_fxrstor,
7140
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
7141
            break;
7142
        case 2: /* ldmxcsr */
7143
        case 3: /* stmxcsr */
7144
            if (s->flags & HF_TS_MASK) {
7145
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7146
                break;
7147
            }
7148
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
7149
                mod == 3)
7150
                goto illegal_op;
7151
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7152
            if (op == 2) {
7153
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7154
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
7155
            } else {
7156
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
7157
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
7158
            }
7159
            break;
7160
        case 5: /* lfence */
7161
        case 6: /* mfence */
7162
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
7163
                goto illegal_op;
7164
            break;
7165
        case 7: /* sfence / clflush */
7166
            if ((modrm & 0xc7) == 0xc0) {
7167
                /* sfence */
7168
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
7169
                if (!(s->cpuid_features & CPUID_SSE))
7170
                    goto illegal_op;
7171
            } else {
7172
                /* clflush */
7173
                if (!(s->cpuid_features & CPUID_CLFLUSH))
7174
                    goto illegal_op;
7175
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7176
            }
7177
            break;
7178
        default:
7179
            goto illegal_op;
7180
        }
7181
        break;
7182
    case 0x10d: /* 3DNow! prefetch(w) */
7183
        modrm = ldub_code(s->pc++);
7184
        mod = (modrm >> 6) & 3;
7185
        if (mod == 3)
7186
            goto illegal_op;
7187
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7188
        /* ignore for now */
7189
        break;
7190
    case 0x1aa: /* rsm */
7191
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
7192
        if (!(s->flags & HF_SMM_MASK))
7193
            goto illegal_op;
7194
        if (s->cc_op != CC_OP_DYNAMIC) {
7195
            gen_op_set_cc_op(s->cc_op);
7196
            s->cc_op = CC_OP_DYNAMIC;
7197
        }
7198
        gen_jmp_im(s->pc - s->cs_base);
7199
        tcg_gen_helper_0_0(helper_rsm);
7200
        gen_eob(s);
7201
        break;
7202
    case 0x10e ... 0x10f:
7203
        /* 3DNow! instructions, ignore prefixes */
7204
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
7205
    case 0x110 ... 0x117:
7206
    case 0x128 ... 0x12f:
7207
    case 0x138 ... 0x13a:
7208
    case 0x150 ... 0x177:
7209
    case 0x17c ... 0x17f:
7210
    case 0x1c2:
7211
    case 0x1c4 ... 0x1c6:
7212
    case 0x1d0 ... 0x1fe:
7213
        gen_sse(s, b, pc_start, rex_r);
7214
        break;
7215
    default:
7216
        goto illegal_op;
7217
    }
7218
    /* lock generation */
7219
    if (s->prefix & PREFIX_LOCK)
7220
        tcg_gen_helper_0_0(helper_unlock);
7221
    return s->pc;
7222
 illegal_op:
7223
    if (s->prefix & PREFIX_LOCK)
7224
        tcg_gen_helper_0_0(helper_unlock);
7225
    /* XXX: ensure that no lock was generated */
7226
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
7227
    return s->pc;
7228
}
7229

    
7230
/* One-time TCG setup for the x86 front end: allocate the global TCG
   values that mirror the lazily-evaluated condition-code state
   (cc_op/cc_src/cc_dst/cc_tmp) stored in CPUState, and register every
   helper function with TCG.  Must be called once before any
   translation takes place.  */
void optimize_flags_init(void)
{
    /* Sanity check: CCTable must be exactly 8 bytes on 32-bit hosts
       and 16 bytes on 64-bit hosts — presumably so the cc tables can
       be indexed by a simple shift of cc_op; confirm against the
       CCTable consumers before changing its layout.  */
#if TCG_TARGET_REG_BITS == 32
    assert(sizeof(CCTable) == (1 << 3));
#else
    assert(sizeof(CCTable) == (1 << 4));
#endif
    /* cpu_env lives permanently in the reserved host register AREG0;
       the cc_* values are backed by their CPUState fields.  */
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
    cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
                                   TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
    cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
    cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
    cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");

    /* register helpers: expanding helper.h under this DEF_HELPER
       definition registers every declared helper by name with TCG */
#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
#include "helper.h"
}
7252

    
7253
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
7254
   basic block 'tb'. If search_pc is TRUE, also generate PC
7255
   information for each intermediate instruction. */
7256
static inline void gen_intermediate_code_internal(CPUState *env,
7257
                                                  TranslationBlock *tb,
7258
                                                  int search_pc)
7259
{
7260
    DisasContext dc1, *dc = &dc1;
7261
    target_ulong pc_ptr;
7262
    uint16_t *gen_opc_end;
7263
    int j, lj, cflags;
7264
    uint64_t flags;
7265
    target_ulong pc_start;
7266
    target_ulong cs_base;
7267
    int num_insns;
7268
    int max_insns;
7269

    
7270
    /* generate intermediate code */
7271
    pc_start = tb->pc;
7272
    cs_base = tb->cs_base;
7273
    flags = tb->flags;
7274
    cflags = tb->cflags;
7275

    
7276
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
7277
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
7278
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
7279
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
7280
    dc->f_st = 0;
7281
    dc->vm86 = (flags >> VM_SHIFT) & 1;
7282
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
7283
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
7284
    dc->tf = (flags >> TF_SHIFT) & 1;
7285
    dc->singlestep_enabled = env->singlestep_enabled;
7286
    dc->cc_op = CC_OP_DYNAMIC;
7287
    dc->cs_base = cs_base;
7288
    dc->tb = tb;
7289
    dc->popl_esp_hack = 0;
7290
    /* select memory access functions */
7291
    dc->mem_index = 0;
7292
    if (flags & HF_SOFTMMU_MASK) {
7293
        if (dc->cpl == 3)
7294
            dc->mem_index = 2 * 4;
7295
        else
7296
            dc->mem_index = 1 * 4;
7297
    }
7298
    dc->cpuid_features = env->cpuid_features;
7299
    dc->cpuid_ext_features = env->cpuid_ext_features;
7300
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
7301
    dc->cpuid_ext3_features = env->cpuid_ext3_features;
7302
#ifdef TARGET_X86_64
7303
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
7304
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
7305
#endif
7306
    dc->flags = flags;
7307
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
7308
                    (flags & HF_INHIBIT_IRQ_MASK)
7309
#ifndef CONFIG_SOFTMMU
7310
                    || (flags & HF_SOFTMMU_MASK)
7311
#endif
7312
                    );
7313
#if 0
7314
    /* check addseg logic */
7315
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
7316
        printf("ERROR addseg\n");
7317
#endif
7318

    
7319
    cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
7320
    cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
7321
    cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
7322
    cpu_T3 = tcg_temp_new(TCG_TYPE_TL);
7323

    
7324
    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
7325
    cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
7326
    cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
7327
    cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
7328
    cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
7329
    cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
7330
    cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
7331
    cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
7332
    cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
7333

    
7334
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
7335

    
7336
    dc->is_jmp = DISAS_NEXT;
7337
    pc_ptr = pc_start;
7338
    lj = -1;
7339
    num_insns = 0;
7340
    max_insns = tb->cflags & CF_COUNT_MASK;
7341
    if (max_insns == 0)
7342
        max_insns = CF_COUNT_MASK;
7343

    
7344
    gen_icount_start();
7345
    for(;;) {
7346
        if (env->nb_breakpoints > 0) {
7347
            for(j = 0; j < env->nb_breakpoints; j++) {
7348
                if (env->breakpoints[j] == pc_ptr) {
7349
                    gen_debug(dc, pc_ptr - dc->cs_base);
7350
                    break;
7351
                }
7352
            }
7353
        }
7354
        if (search_pc) {
7355
            j = gen_opc_ptr - gen_opc_buf;
7356
            if (lj < j) {
7357
                lj++;
7358
                while (lj < j)
7359
                    gen_opc_instr_start[lj++] = 0;
7360
            }
7361
            gen_opc_pc[lj] = pc_ptr;
7362
            gen_opc_cc_op[lj] = dc->cc_op;
7363
            gen_opc_instr_start[lj] = 1;
7364
            gen_opc_icount[lj] = num_insns;
7365
        }
7366
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
7367
            gen_io_start();
7368

    
7369
        pc_ptr = disas_insn(dc, pc_ptr);
7370
        num_insns++;
7371
        /* stop translation if indicated */
7372
        if (dc->is_jmp)
7373
            break;
7374
        /* if single step mode, we generate only one instruction and
7375
           generate an exception */
7376
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
7377
           the flag and abort the translation to give the irqs a
7378
           change to be happen */
7379
        if (dc->tf || dc->singlestep_enabled ||
7380
            (flags & HF_INHIBIT_IRQ_MASK)) {
7381
            gen_jmp_im(pc_ptr - dc->cs_base);
7382
            gen_eob(dc);
7383
            break;
7384
        }
7385
        /* if too long translation, stop generation too */
7386
        if (gen_opc_ptr >= gen_opc_end ||
7387
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
7388
            num_insns >= max_insns) {
7389
            gen_jmp_im(pc_ptr - dc->cs_base);
7390
            gen_eob(dc);
7391
            break;
7392
        }
7393
    }
7394
    if (tb->cflags & CF_LAST_IO)
7395
        gen_io_end();
7396
    gen_icount_end(tb, num_insns);
7397
    *gen_opc_ptr = INDEX_op_end;
7398
    /* we don't forget to fill the last values */
7399
    if (search_pc) {
7400
        j = gen_opc_ptr - gen_opc_buf;
7401
        lj++;
7402
        while (lj <= j)
7403
            gen_opc_instr_start[lj++] = 0;
7404
    }
7405

    
7406
#ifdef DEBUG_DISAS
7407
    if (loglevel & CPU_LOG_TB_CPU) {
7408
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
7409
    }
7410
    if (loglevel & CPU_LOG_TB_IN_ASM) {
7411
        int disas_flags;
7412
        fprintf(logfile, "----------------\n");
7413
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
7414
#ifdef TARGET_X86_64
7415
        if (dc->code64)
7416
            disas_flags = 2;
7417
        else
7418
#endif
7419
            disas_flags = !dc->code32;
7420
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
7421
        fprintf(logfile, "\n");
7422
    }
7423
#endif
7424

    
7425
    if (!search_pc) {
7426
        tb->size = pc_ptr - pc_start;
7427
        tb->icount = num_insns;
7428
    }
7429
}
7430

    
7431
/* Public entry point: translate the basic block 'tb' to TCG ops
   (search_pc = 0, so tb->size/tb->icount are filled in).  */
void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}
7435

    
7436
/* Public entry point: retranslate 'tb' with per-op PC bookkeeping
   enabled (search_pc = 1), used when restoring CPU state after a
   fault inside a translated block.  */
void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
7440

    
7441
/* Restore guest CPU state from the per-op arrays recorded by
   gen_intermediate_code_internal() in search_pc mode: sets env->eip
   (and env->cc_op when it was statically known) for the intermediate
   op at index pc_pos.  searched_pc is only used for logging; puc is
   unused on this target.  */
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    int cc_op;
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP) {
        int i;
        fprintf(logfile, "RESTORE:\n");
        /* dump the guest PC of every instruction start up to pc_pos */
        for(i = 0;i <= pc_pos; i++) {
            if (gen_opc_instr_start[i]) {
                fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
            }
        }
        fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                (uint32_t)tb->cs_base);
    }
#endif
    /* gen_opc_pc holds linear addresses; subtract cs_base to get eip */
    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
    cc_op = gen_opc_cc_op[pc_pos];
    /* only restore cc_op when the translator knew a static value at
       this point; CC_OP_DYNAMIC means env->cc_op is left untouched */
    if (cc_op != CC_OP_DYNAMIC)
        env->cc_op = cc_op;
}