Statistics
| Branch: | Revision:

root / target-i386 / translate.c @ f484d386

History | View | Annotate | Download (220.1 kB)

1
/*
2
 *  i386 translation
3
 *
4
 *  Copyright (c) 2003 Fabrice Bellard
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, write to the Free Software
18
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
 */
20
#include <stdarg.h>
21
#include <stdlib.h>
22
#include <stdio.h>
23
#include <string.h>
24
#include <inttypes.h>
25
#include <signal.h>
26
#include <assert.h>
27

    
28
#include "cpu.h"
29
#include "exec-all.h"
30
#include "disas.h"
31
#include "helper.h"
32
#include "tcg-op.h"
33

    
34
/* instruction prefix flags, accumulated in DisasContext.prefix while
   decoding */
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10

#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
/* 32-bit-only build: 64-bit-only table entries become NULL and the
   REX/code64 accessors collapse to constants */
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif
57

    
58
//#define MACRO_TEST   1
59

    
60
/* global register indexes */
61
/* global register indexes */
static TCGv cpu_env, cpu_T[2], cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst;
static TCGv cpu_T3;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
static TCGv cpu_tmp5, cpu_tmp6;

#ifdef TARGET_X86_64
/* when non-zero, OT_BYTE accesses to regs 4..7 use the low byte of
   that register instead of the legacy high-byte regs (AH/CH/DH/BH) —
   see gen_op_mov_reg_TN/gen_op_mov_TN_reg */
static int x86_64_hregs;
#endif
70

    
71
/* Decoder state: the first group is per-instruction, the rest is fixed
   for the whole translation block. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* segment override register, -1 if no override */
    int prefix;   /* PREFIX_* bitmask seen so far */
    int aflag, dflag; /* address/operand size: 0=16, 1=32, 2=64 bit */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* index/base extension bits from the REX prefix */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;    /* current privilege level */
    int iopl;   /* I/O privilege level */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;      /* CPUID bits, used to gate optional insns */
    int cpuid_ext_features;
    int cpuid_ext2_features;
} DisasContext;
107

    
108
static void gen_eob(DisasContext *s);
109
static void gen_jmp(DisasContext *s, target_ulong eip);
110
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
111

    
112
/* i386 arith/logic operations */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

enum {
    /* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
159

    
160
/* Trivial wrappers loading immediates into, or masking, the T0/T1/A0
   TCG globals. */

static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

/* full-width target_ulong immediates (covers 64-bit values on x86_64) */
static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

/* truncate T0 to 16 bits */
static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

/* truncate the address register to 16 bits (16 bit addressing modes) */
static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
226

    
227
#ifdef TARGET_X86_64

/* byte, word, long, quad */
#define NB_OP_SIZES 4

/* expand to one entry per integer register, in encoding order */
#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,\
  prefix ## R8 ## suffix,\
  prefix ## R9 ## suffix,\
  prefix ## R10 ## suffix,\
  prefix ## R11 ## suffix,\
  prefix ## R12 ## suffix,\
  prefix ## R13 ## suffix,\
  prefix ## R14 ## suffix,\
  prefix ## R15 ## suffix,

#else /* !TARGET_X86_64 */

/* byte, word, long */
#define NB_OP_SIZES 3

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,

#endif /* !TARGET_X86_64 */

/* Byte offsets of the 8/16/32-bit sub-registers inside a target_ulong
   register cell, for both host endiannesses.  REG_H_OFFSET is the
   legacy high-byte (AH..BH); REG_LH_OFFSET is the upper 32-bit half
   of a 64-bit cell. */
#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
278

    
279
/* Store T[t_index] into integer register 'reg' with operand size 'ot'.
   For OT_BYTE, regs 4..7 (without REX / x86_64_hregs) denote the legacy
   high-byte registers, i.e. byte 1 of regs[reg - 4]. */
static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
311

    
312
/* convenience wrappers for the two operand temporaries */
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 0, reg);
}

static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 1, reg);
}
321

    
322
/* Store the address register A0 into integer register 'reg';
   size: 0=16, 1=32, 2=64 bit. */
static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
347

    
348
/* Load integer register 'reg' into T[t_index].  Only the legacy
   high-byte registers need special handling; every other case loads
   the whole register cell (the load is not truncated to 'ot' — callers
   use only the low bits they need). */
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            /* AH, CH, DH, BH: byte 1 of the corresponding low register */
            tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}
364

    
365
/* A0 = low 32 bits of register 'reg' (zero-extended) */
static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}

/* A0 += val, keeping A0 within 32 bits on 64-bit targets */
static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
/* A0 += val with full 64-bit wraparound */
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

/* add an immediate to A0, choosing 32/64-bit semantics from the
   current code size */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
394

    
395
static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

/* eip = T0 (indirect jump target) */
static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}

/* ESP += val, writing back only the low 16 bits (16 bit stack) */
static inline void gen_op_addw_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]) + REG_W_OFFSET);
}

/* ESP += val with 32-bit wraparound */
static inline void gen_op_addl_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}

#ifdef TARGET_X86_64
/* RSP += val (full 64-bit) */
static inline void gen_op_addq_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}
#endif

/* record which lazy condition-code computation is current */
static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}
435

    
436
/* A0 += reg << shift, truncated to 32 bits on 64-bit targets
   (scaled-index addressing) */
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0) 
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

/* A0 = low 32 bits of segment base */
static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

/* A0 += segment base, truncated to 32 bits on 64-bit targets */
static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
460

    
461
#ifdef TARGET_X86_64
462
static inline void gen_op_movq_A0_seg(int reg)
463
{
464
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
465
}
466

    
467
static inline void gen_op_addq_A0_seg(int reg)
468
{
469
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
470
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
471
}
472

    
473
static inline void gen_op_movq_A0_reg(int reg)
474
{
475
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
476
}
477

    
478
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
479
{
480
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
481
    if (shift != 0) 
482
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
483
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
484
}
485
#endif
486

    
487
/* CMOV dispatch: [operand size (w/l/q)][destination register] */
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_cmovw_, _T1_T0)
    },
    [1] = {
        DEF_REGS(gen_op_cmovl_, _T1_T0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_cmovq_, _T1_T0)
    },
#endif
};

/* bit scan dispatch: [operand size (w/l/q)][0 = BSF, 1 = BSR] */
static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
    [0] = {
        gen_op_bsfw_T0_cc,
        gen_op_bsrw_T0_cc,
    },
    [1] = {
        gen_op_bsfl_T0_cc,
        gen_op_bsrl_T0_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_bsfq_T0_cc,
        gen_op_bsrq_T0_cc,
    },
#endif
};
517

    
518
/* Guest memory accessors at address A0.  In 'idx' the low 2 bits are
   the operand size (OT_*); the remaining bits are the caller's
   s->mem_index, from which (idx >> 2) - 1 recovers the softmmu memory
   index. */

/* sign-extending load into T0 */
static inline void gen_op_lds_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 2:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
static inline void gen_op_ld_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

/* unsigned load into T0 (alias of gen_op_ld_T0_A0, which already
   zero-extends) */
static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_T0_A0(idx);
}

/* unsigned load into T1 */
static inline void gen_op_ld_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}

/* store T0 at [A0] */
static inline void gen_op_st_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

/* store T1 at [A0] */
static inline void gen_op_st_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}
620

    
621
/* synchronize the architectural eip with the given translation pc */
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
626

    
627
/* A0 = segment base + ESI for a string instruction source operand,
   honouring segment overrides and the current address size */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64 bit address: segment base only with an explicit override */
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 bit address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}
661

    
662
/* A0 = ES base + EDI for a string instruction destination operand.
   The destination segment is always ES — no override applies. */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
682

    
683
/* load the per-iteration increment (sign selected by DF) into T0,
   indexed by operand size */
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
    gen_op_movl_T0_Dshiftb,
    gen_op_movl_T0_Dshiftw,
    gen_op_movl_T0_Dshiftl,
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
};

/* conditional jumps on ECX, indexed by address size (w/l/q) */
static GenOpFunc1 *gen_op_jnz_ecx[3] = {
    gen_op_jnz_ecxw,
    gen_op_jnz_ecxl,
    X86_64_ONLY(gen_op_jnz_ecxq),
};

static GenOpFunc1 *gen_op_jz_ecx[3] = {
    gen_op_jz_ecxw,
    gen_op_jz_ecxl,
    X86_64_ONLY(gen_op_jz_ecxq),
};

/* ECX decrement for REP loops, indexed by address size */
static GenOpFunc *gen_op_dec_ECX[3] = {
    gen_op_decw_ECX,
    gen_op_decl_ECX,
    X86_64_ONLY(gen_op_decq_ECX),
};

/* REPZ/REPNZ termination test: [0 = jump-if-not-zero, 1 = jump-if-zero]
   [operand size] */
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};

/* I/O port access helpers, indexed by operand size (b/w/l) */
static void *helper_in_func[3] = {
    helper_inb,
    helper_inw,
    helper_inl,
};

static void *helper_out_func[3] = {
    helper_outb,
    helper_outw,
    helper_outl,
};

/* I/O permission check helpers, indexed by operand size */
static void *gen_check_io_func[3] = {
    helper_check_iob,
    helper_check_iow,
    helper_check_iol,
};
740

    
741
/* Emit the permission checks needed before an I/O instruction whose
   port number is in T0.  In protected mode the check helper is called
   when CPL > IOPL, and always in vm86 mode; under SVM the IOIO
   intercept helper is also invoked.  CPU state (cc_op, eip) is synced
   first so the helpers can raise an exception. */
static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
                           cpu_tmp2_i32);
    }
    if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
        /* avoid re-syncing state if the first check already did it */
        if (!state_saved) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(cur_eip);
            state_saved = 1;
        }
        svm_flags |= (1 << (4 + ot)); /* fold the access size into the flags */
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           cpu_tmp2_i32,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));
    }
}
773

    
774
/* one MOVS iteration: copy [DS:ESI] -> [ES:EDI], then advance both
   index registers by the DF-directed increment */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
795

    
796
/* flush the lazily-tracked cc_op to the CPU state and mark it dynamic */
static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}

/* flag sources for ops whose flags depend only on the result (cc_dst) */
static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

/* flag sources for two-operand ops: cc_src = T1, cc_dst = T0 */
static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

/* CMP: record T1 and the difference T0 - T1 for lazy flag evaluation */
static inline void gen_op_cmpl_T0_T1_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

/* TEST: flags from T0 & T1 */
static inline void gen_op_testl_T0_T1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

/* NEG: cc_src = -T0 (the original operand), cc_dst = result */
static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
833

    
834
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
/* Emit the ECX == 0 early-exit for a REP string op: when ECX is zero,
   control falls through to label l2 which jumps to the next
   instruction.  Returns l2 so the caller can also branch there. */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
848

    
849
/* one STOS iteration: store AL/AX/EAX/RAX at [ES:EDI], advance EDI */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}

/* one LODS iteration: load [DS:ESI] into AL/AX/EAX/RAX, advance ESI */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
884

    
885
/* one SCAS iteration: compare accumulator with [ES:EDI], advance EDI */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}

/* one CMPS iteration: compare [DS:ESI] with [ES:EDI], advance both */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
926

    
927
/* one INS iteration: read port DX into [ES:EDI], advance EDI */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    /* dummy store of 0 first — presumably so any write page fault is
       taken before the port is actually read; confirm against newer
       QEMU which documents this as a restartability requirement */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff); /* port is 16 bit */
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}

/* one OUTS iteration: write [DS:ESI] to port DX, advance ESI */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff); /* port is 16 bit */
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
973

    
974
/* same method as Valgrind : we generate jumps to current or next
   instruction */
/* REP prefix expansion: test ECX (exit to next_eip if zero), run one
   iteration of the string op, decrement ECX, then jump back to cur_eip
   so each iteration is its own translated loop pass. */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

/* As GEN_REPZ, for REPZ/REPNZ ops (SCAS/CMPS) that also terminate on
   the ZF condition selected by 'nz'. */
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);\
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
1017

    
1018
/* Jcc condition codes (low 3 bits of the opcode's condition field;
   the inverted forms are handled by the callers) */
enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};
1028

    
1029
/* Fast conditional jumps after a SUB/CMP: [operand size][JCC_*].
   NULL entries (JCC_O, JCC_P) have no fast path and fall back to the
   slow flag computation. */
static GenOpFunc1 *gen_jcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_jb_subb,
        gen_op_jz_subb,
        gen_op_jbe_subb,
        gen_op_js_subb,
        NULL,
        gen_op_jl_subb,
        gen_op_jle_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_jb_subw,
        gen_op_jz_subw,
        gen_op_jbe_subw,
        gen_op_js_subw,
        NULL,
        gen_op_jl_subw,
        gen_op_jle_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_jb_subl,
        gen_op_jz_subl,
        gen_op_jbe_subl,
        gen_op_js_subl,
        NULL,
        gen_op_jl_subl,
        gen_op_jle_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        BUGGY_64(gen_op_jb_subq),
        gen_op_jz_subq,
        BUGGY_64(gen_op_jbe_subq),
        gen_op_js_subq,
        NULL,
        BUGGY_64(gen_op_jl_subq),
        BUGGY_64(gen_op_jle_subq),
    },
#endif
};
/* LOOPNZ/LOOPZ/JCXZ dispatch: [address size][variant] */
static GenOpFunc1 *gen_op_loop[3][4] = {
    [0] = {
        gen_op_loopnzw,
        gen_op_loopzw,
        gen_op_jnz_ecxw,
    },
    [1] = {
        gen_op_loopnzl,
        gen_op_loopzl,
        gen_op_jnz_ecxl,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_loopnzq,
        gen_op_loopzq,
        gen_op_jnz_ecxq,
    },
#endif
};

/* SETcc via full flag computation, indexed by JCC_* */
static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};
1103

    
1104
static GenOpFunc *gen_setcc_sub[4][8] = {
1105
    [OT_BYTE] = {
1106
        NULL,
1107
        gen_op_setb_T0_subb,
1108
        gen_op_setz_T0_subb,
1109
        gen_op_setbe_T0_subb,
1110
        gen_op_sets_T0_subb,
1111
        NULL,
1112
        gen_op_setl_T0_subb,
1113
        gen_op_setle_T0_subb,
1114
    },
1115
    [OT_WORD] = {
1116
        NULL,
1117
        gen_op_setb_T0_subw,
1118
        gen_op_setz_T0_subw,
1119
        gen_op_setbe_T0_subw,
1120
        gen_op_sets_T0_subw,
1121
        NULL,
1122
        gen_op_setl_T0_subw,
1123
        gen_op_setle_T0_subw,
1124
    },
1125
    [OT_LONG] = {
1126
        NULL,
1127
        gen_op_setb_T0_subl,
1128
        gen_op_setz_T0_subl,
1129
        gen_op_setbe_T0_subl,
1130
        gen_op_sets_T0_subl,
1131
        NULL,
1132
        gen_op_setl_T0_subl,
1133
        gen_op_setle_T0_subl,
1134
    },
1135
#ifdef TARGET_X86_64
1136
    [OT_QUAD] = {
1137
        NULL,
1138
        gen_op_setb_T0_subq,
1139
        gen_op_setz_T0_subq,
1140
        gen_op_setbe_T0_subq,
1141
        gen_op_sets_T0_subq,
1142
        NULL,
1143
        gen_op_setl_T0_subq,
1144
        gen_op_setle_T0_subq,
1145
    },
1146
#endif
1147
};
1148

    
1149
static void *helper_fp_arith_ST0_FT0[8] = {
1150
    helper_fadd_ST0_FT0,
1151
    helper_fmul_ST0_FT0,
1152
    helper_fcom_ST0_FT0,
1153
    helper_fcom_ST0_FT0,
1154
    helper_fsub_ST0_FT0,
1155
    helper_fsubr_ST0_FT0,
1156
    helper_fdiv_ST0_FT0,
1157
    helper_fdivr_ST0_FT0,
1158
};
1159

    
1160
/* NOTE the exception in "r" op ordering */
1161
static void *helper_fp_arith_STN_ST0[8] = {
1162
    helper_fadd_STN_ST0,
1163
    helper_fmul_STN_ST0,
1164
    NULL,
1165
    NULL,
1166
    helper_fsubr_STN_ST0,
1167
    helper_fsub_STN_ST0,
1168
    helper_fdivr_STN_ST0,
1169
    helper_fdiv_STN_ST0,
1170
};
1171

    
1172
/* compute eflags.C to reg */
1173
static void gen_compute_eflags_c(TCGv reg)
1174
{
1175
#if TCG_TARGET_REG_BITS == 32
1176
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
1177
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 
1178
                     (long)cc_table + offsetof(CCTable, compute_c));
1179
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
1180
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE, 
1181
                 1, &cpu_tmp2_i32, 0, NULL);
1182
#else
1183
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
1184
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
1185
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64, 
1186
                     (long)cc_table + offsetof(CCTable, compute_c));
1187
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
1188
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE, 
1189
                 1, &cpu_tmp2_i32, 0, NULL);
1190
#endif
1191
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
1192
}
1193

    
1194
/* compute all eflags to cc_src */
1195
static void gen_compute_eflags(TCGv reg)
1196
{
1197
#if TCG_TARGET_REG_BITS == 32
1198
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
1199
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 
1200
                     (long)cc_table + offsetof(CCTable, compute_all));
1201
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
1202
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE, 
1203
                 1, &cpu_tmp2_i32, 0, NULL);
1204
#else
1205
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
1206
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
1207
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64, 
1208
                     (long)cc_table + offsetof(CCTable, compute_all));
1209
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
1210
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE, 
1211
                 1, &cpu_tmp2_i32, 0, NULL);
1212
#endif
1213
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
1214
}
1215

    
1216
/* if d == OR_TMP0, it means memory operand (address in A0) */
1217
static void gen_op(DisasContext *s1, int op, int ot, int d)
1218
{
1219
    if (d != OR_TMP0) {
1220
        gen_op_mov_TN_reg(ot, 0, d);
1221
    } else {
1222
        gen_op_ld_T0_A0(ot + s1->mem_index);
1223
    }
1224
    switch(op) {
1225
    case OP_ADCL:
1226
        if (s1->cc_op != CC_OP_DYNAMIC)
1227
            gen_op_set_cc_op(s1->cc_op);
1228
        gen_compute_eflags_c(cpu_tmp4);
1229
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1230
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1231
        if (d != OR_TMP0)
1232
            gen_op_mov_reg_T0(ot, d);
1233
        else
1234
            gen_op_st_T0_A0(ot + s1->mem_index);
1235
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1236
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1237
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1238
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1239
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
1240
        s1->cc_op = CC_OP_DYNAMIC;
1241
        break;
1242
    case OP_SBBL:
1243
        if (s1->cc_op != CC_OP_DYNAMIC)
1244
            gen_op_set_cc_op(s1->cc_op);
1245
        gen_compute_eflags_c(cpu_tmp4);
1246
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1247
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1248
        if (d != OR_TMP0)
1249
            gen_op_mov_reg_T0(ot, d);
1250
        else
1251
            gen_op_st_T0_A0(ot + s1->mem_index);
1252
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1253
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1254
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1255
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1256
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
1257
        s1->cc_op = CC_OP_DYNAMIC;
1258
        break;
1259
    case OP_ADDL:
1260
        gen_op_addl_T0_T1();
1261
        if (d != OR_TMP0)
1262
            gen_op_mov_reg_T0(ot, d);
1263
        else
1264
            gen_op_st_T0_A0(ot + s1->mem_index);
1265
        gen_op_update2_cc();
1266
        s1->cc_op = CC_OP_ADDB + ot;
1267
        break;
1268
    case OP_SUBL:
1269
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1270
        if (d != OR_TMP0)
1271
            gen_op_mov_reg_T0(ot, d);
1272
        else
1273
            gen_op_st_T0_A0(ot + s1->mem_index);
1274
        gen_op_update2_cc();
1275
        s1->cc_op = CC_OP_SUBB + ot;
1276
        break;
1277
    default:
1278
    case OP_ANDL:
1279
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1280
        if (d != OR_TMP0)
1281
            gen_op_mov_reg_T0(ot, d);
1282
        else
1283
            gen_op_st_T0_A0(ot + s1->mem_index);
1284
        gen_op_update1_cc();
1285
        s1->cc_op = CC_OP_LOGICB + ot;
1286
        break;
1287
    case OP_ORL:
1288
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1289
        if (d != OR_TMP0)
1290
            gen_op_mov_reg_T0(ot, d);
1291
        else
1292
            gen_op_st_T0_A0(ot + s1->mem_index);
1293
        gen_op_update1_cc();
1294
        s1->cc_op = CC_OP_LOGICB + ot;
1295
        break;
1296
    case OP_XORL:
1297
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1298
        if (d != OR_TMP0)
1299
            gen_op_mov_reg_T0(ot, d);
1300
        else
1301
            gen_op_st_T0_A0(ot + s1->mem_index);
1302
        gen_op_update1_cc();
1303
        s1->cc_op = CC_OP_LOGICB + ot;
1304
        break;
1305
    case OP_CMPL:
1306
        gen_op_cmpl_T0_T1_cc();
1307
        s1->cc_op = CC_OP_SUBB + ot;
1308
        break;
1309
    }
1310
}
1311

    
1312
/* if d == OR_TMP0, it means memory operand (address in A0) */
1313
static void gen_inc(DisasContext *s1, int ot, int d, int c)
1314
{
1315
    if (d != OR_TMP0)
1316
        gen_op_mov_TN_reg(ot, 0, d);
1317
    else
1318
        gen_op_ld_T0_A0(ot + s1->mem_index);
1319
    if (s1->cc_op != CC_OP_DYNAMIC)
1320
        gen_op_set_cc_op(s1->cc_op);
1321
    if (c > 0) {
1322
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
1323
        s1->cc_op = CC_OP_INCB + ot;
1324
    } else {
1325
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
1326
        s1->cc_op = CC_OP_DECB + ot;
1327
    }
1328
    if (d != OR_TMP0)
1329
        gen_op_mov_reg_T0(ot, d);
1330
    else
1331
        gen_op_st_T0_A0(ot + s1->mem_index);
1332
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1333
    gen_compute_eflags_c(cpu_cc_src);
1334
}
1335

    
1336
static void gen_extu(int ot, TCGv reg)
1337
{
1338
    switch(ot) {
1339
    case OT_BYTE:
1340
        tcg_gen_ext8u_tl(reg, reg);
1341
        break;
1342
    case OT_WORD:
1343
        tcg_gen_ext16u_tl(reg, reg);
1344
        break;
1345
    case OT_LONG:
1346
        tcg_gen_ext32u_tl(reg, reg);
1347
        break;
1348
    default:
1349
        break;
1350
    }
1351
}
1352

    
1353
static void gen_exts(int ot, TCGv reg)
1354
{
1355
    switch(ot) {
1356
    case OT_BYTE:
1357
        tcg_gen_ext8s_tl(reg, reg);
1358
        break;
1359
    case OT_WORD:
1360
        tcg_gen_ext16s_tl(reg, reg);
1361
        break;
1362
    case OT_LONG:
1363
        tcg_gen_ext32s_tl(reg, reg);
1364
        break;
1365
    default:
1366
        break;
1367
    }
1368
}
1369

    
1370
/* XXX: add faster immediate case */
1371
static void gen_shift_rm_T1(DisasContext *s, int ot, int op1, 
1372
                            int is_right, int is_arith)
1373
{
1374
    target_ulong mask;
1375
    int shift_label;
1376
    
1377
    if (ot == OT_QUAD)
1378
        mask = 0x3f;
1379
    else
1380
        mask = 0x1f;
1381

    
1382
    /* load */
1383
    if (op1 == OR_TMP0)
1384
        gen_op_ld_T0_A0(ot + s->mem_index);
1385
    else
1386
        gen_op_mov_TN_reg(ot, 0, op1);
1387

    
1388
    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
1389

    
1390
    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);
1391

    
1392
    if (is_right) {
1393
        if (is_arith) {
1394
            gen_exts(ot, cpu_T[0]);
1395
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1396
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1397
        } else {
1398
            gen_extu(ot, cpu_T[0]);
1399
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1400
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1401
        }
1402
    } else {
1403
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1404
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1405
    }
1406

    
1407
    /* store */
1408
    if (op1 == OR_TMP0)
1409
        gen_op_st_T0_A0(ot + s->mem_index);
1410
    else
1411
        gen_op_mov_reg_T0(ot, op1);
1412
        
1413
    /* update eflags if non zero shift */
1414
    if (s->cc_op != CC_OP_DYNAMIC)
1415
        gen_op_set_cc_op(s->cc_op);
1416

    
1417
    shift_label = gen_new_label();
1418
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), shift_label);
1419

    
1420
    tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
1421
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1422
    if (is_right)
1423
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1424
    else
1425
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1426
        
1427
    gen_set_label(shift_label);
1428
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1429
}
1430

    
1431
static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
1432
{
1433
    if (arg2 >= 0)
1434
        tcg_gen_shli_tl(ret, arg1, arg2);
1435
    else
1436
        tcg_gen_shri_tl(ret, arg1, -arg2);
1437
}
1438

    
1439
/* XXX: add faster immediate case */
1440
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1, 
1441
                          int is_right)
1442
{
1443
    target_ulong mask;
1444
    int label1, label2, data_bits;
1445
    
1446
    if (ot == OT_QUAD)
1447
        mask = 0x3f;
1448
    else
1449
        mask = 0x1f;
1450

    
1451
    /* load */
1452
    if (op1 == OR_TMP0)
1453
        gen_op_ld_T0_A0(ot + s->mem_index);
1454
    else
1455
        gen_op_mov_TN_reg(ot, 0, op1);
1456

    
1457
    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
1458

    
1459
    /* Must test zero case to avoid using undefined behaviour in TCG
1460
       shifts. */
1461
    label1 = gen_new_label();
1462
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label1);
1463
    
1464
    if (ot <= OT_WORD)
1465
        tcg_gen_andi_tl(cpu_tmp0, cpu_T[1], (1 << (3 + ot)) - 1);
1466
    else
1467
        tcg_gen_mov_tl(cpu_tmp0, cpu_T[1]);
1468
    
1469
    gen_extu(ot, cpu_T[0]);
1470
    tcg_gen_mov_tl(cpu_T3, cpu_T[0]);
1471

    
1472
    data_bits = 8 << ot;
1473
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
1474
       fix TCG definition) */
1475
    if (is_right) {
1476
        tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
1477
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
1478
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1479
    } else {
1480
        tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
1481
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
1482
        tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1483
    }
1484
    tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1485

    
1486
    gen_set_label(label1);
1487
    /* store */
1488
    if (op1 == OR_TMP0)
1489
        gen_op_st_T0_A0(ot + s->mem_index);
1490
    else
1491
        gen_op_mov_reg_T0(ot, op1);
1492
    
1493
    /* update eflags */
1494
    if (s->cc_op != CC_OP_DYNAMIC)
1495
        gen_op_set_cc_op(s->cc_op);
1496

    
1497
    label2 = gen_new_label();
1498
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label2);
1499

    
1500
    gen_compute_eflags(cpu_cc_src);
1501
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
1502
    tcg_gen_xor_tl(cpu_tmp0, cpu_T3, cpu_T[0]);
1503
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
1504
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
1505
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
1506
    if (is_right) {
1507
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], data_bits - 1);
1508
    }
1509
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_C);
1510
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
1511
    
1512
    tcg_gen_discard_tl(cpu_cc_dst);
1513
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
1514
        
1515
    gen_set_label(label2);
1516
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1517
}
1518

    
1519
static void *helper_rotc[8] = {
1520
    helper_rclb,
1521
    helper_rclw,
1522
    helper_rcll,
1523
    X86_64_ONLY(helper_rclq),
1524
    helper_rcrb,
1525
    helper_rcrw,
1526
    helper_rcrl,
1527
    X86_64_ONLY(helper_rcrq),
1528
};
1529

    
1530
/* XXX: add faster immediate = 1 case */
1531
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1, 
1532
                           int is_right)
1533
{
1534
    int label1;
1535

    
1536
    if (s->cc_op != CC_OP_DYNAMIC)
1537
        gen_op_set_cc_op(s->cc_op);
1538

    
1539
    /* load */
1540
    if (op1 == OR_TMP0)
1541
        gen_op_ld_T0_A0(ot + s->mem_index);
1542
    else
1543
        gen_op_mov_TN_reg(ot, 0, op1);
1544
    
1545
    tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
1546
                       cpu_T[0], cpu_T[0], cpu_T[1]);
1547
    /* store */
1548
    if (op1 == OR_TMP0)
1549
        gen_op_st_T0_A0(ot + s->mem_index);
1550
    else
1551
        gen_op_mov_reg_T0(ot, op1);
1552

    
1553
    /* update eflags */
1554
    label1 = gen_new_label();
1555
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(-1), label1);
1556

    
1557
    tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
1558
    tcg_gen_discard_tl(cpu_cc_dst);
1559
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
1560
        
1561
    gen_set_label(label1);
1562
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1563
}
1564

    
1565
/* XXX: add faster immediate case */
1566
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1, 
1567
                                int is_right)
1568
{
1569
    int label1, label2, data_bits;
1570
    target_ulong mask;
1571

    
1572
    if (ot == OT_QUAD)
1573
        mask = 0x3f;
1574
    else
1575
        mask = 0x1f;
1576

    
1577
    /* load */
1578
    if (op1 == OR_TMP0)
1579
        gen_op_ld_T0_A0(ot + s->mem_index);
1580
    else
1581
        gen_op_mov_TN_reg(ot, 0, op1);
1582

    
1583
    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
1584
    /* Must test zero case to avoid using undefined behaviour in TCG
1585
       shifts. */
1586
    label1 = gen_new_label();
1587
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);
1588
    
1589
    tcg_gen_addi_tl(cpu_tmp5, cpu_T3, -1);
1590
    if (ot == OT_WORD) {
1591
        /* Note: we implement the Intel behaviour for shift count > 16 */
1592
        if (is_right) {
1593
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
1594
            tcg_gen_shli_tl(cpu_tmp0, cpu_T[1], 16);
1595
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1596
            tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
1597

    
1598
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
1599
            
1600
            /* only needed if count > 16, but a test would complicate */
1601
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
1602
            tcg_gen_shl_tl(cpu_tmp0, cpu_T[0], cpu_tmp5);
1603

    
1604
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);
1605

    
1606
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1607
        } else {
1608
            /* XXX: not optimal */
1609
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
1610
            tcg_gen_shli_tl(cpu_T[1], cpu_T[1], 16);
1611
            tcg_gen_or_tl(cpu_T[1], cpu_T[1], cpu_T[0]);
1612
            tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
1613
            
1614
            tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
1615
            tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
1616
            tcg_gen_shr_tl(cpu_tmp6, cpu_T[1], cpu_tmp0);
1617
            tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);
1618

    
1619
            tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
1620
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
1621
            tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
1622
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1623
        }
1624
    } else {
1625
        data_bits = 8 << ot;
1626
        if (is_right) {
1627
            if (ot == OT_LONG)
1628
                tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
1629

    
1630
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
1631

    
1632
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);
1633
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
1634
            tcg_gen_shl_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
1635
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1636
            
1637
        } else {
1638
            if (ot == OT_LONG)
1639
                tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
1640

    
1641
            tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
1642
            
1643
            tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
1644
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
1645
            tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
1646
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1647
        }
1648
    }
1649
    tcg_gen_mov_tl(cpu_T[1], cpu_tmp4);
1650

    
1651
    gen_set_label(label1);
1652
    /* store */
1653
    if (op1 == OR_TMP0)
1654
        gen_op_st_T0_A0(ot + s->mem_index);
1655
    else
1656
        gen_op_mov_reg_T0(ot, op1);
1657
    
1658
    /* update eflags */
1659
    if (s->cc_op != CC_OP_DYNAMIC)
1660
        gen_op_set_cc_op(s->cc_op);
1661

    
1662
    label2 = gen_new_label();
1663
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label2);
1664

    
1665
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1666
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1667
    if (is_right) {
1668
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1669
    } else {
1670
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1671
    }
1672
    gen_set_label(label2);
1673
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1674
}
1675

    
1676
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1677
{
1678
    if (s != OR_TMP1)
1679
        gen_op_mov_TN_reg(ot, 1, s);
1680
    switch(op) {
1681
    case OP_ROL:
1682
        gen_rot_rm_T1(s1, ot, d, 0);
1683
        break;
1684
    case OP_ROR:
1685
        gen_rot_rm_T1(s1, ot, d, 1);
1686
        break;
1687
    case OP_SHL:
1688
    case OP_SHL1:
1689
        gen_shift_rm_T1(s1, ot, d, 0, 0);
1690
        break;
1691
    case OP_SHR:
1692
        gen_shift_rm_T1(s1, ot, d, 1, 0);
1693
        break;
1694
    case OP_SAR:
1695
        gen_shift_rm_T1(s1, ot, d, 1, 1);
1696
        break;
1697
    case OP_RCL:
1698
        gen_rotc_rm_T1(s1, ot, d, 0);
1699
        break;
1700
    case OP_RCR:
1701
        gen_rotc_rm_T1(s1, ot, d, 1);
1702
        break;
1703
    }
1704
}
1705

    
1706
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
1707
{
1708
    /* currently not optimized */
1709
    gen_op_movl_T1_im(c);
1710
    gen_shift(s1, op, ot, d, OR_TMP1);
1711
}
1712

    
1713
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1714
{
1715
    target_long disp;
1716
    int havesib;
1717
    int base;
1718
    int index;
1719
    int scale;
1720
    int opreg;
1721
    int mod, rm, code, override, must_add_seg;
1722

    
1723
    override = s->override;
1724
    must_add_seg = s->addseg;
1725
    if (override >= 0)
1726
        must_add_seg = 1;
1727
    mod = (modrm >> 6) & 3;
1728
    rm = modrm & 7;
1729

    
1730
    if (s->aflag) {
1731

    
1732
        havesib = 0;
1733
        base = rm;
1734
        index = 0;
1735
        scale = 0;
1736

    
1737
        if (base == 4) {
1738
            havesib = 1;
1739
            code = ldub_code(s->pc++);
1740
            scale = (code >> 6) & 3;
1741
            index = ((code >> 3) & 7) | REX_X(s);
1742
            base = (code & 7);
1743
        }
1744
        base |= REX_B(s);
1745

    
1746
        switch (mod) {
1747
        case 0:
1748
            if ((base & 7) == 5) {
1749
                base = -1;
1750
                disp = (int32_t)ldl_code(s->pc);
1751
                s->pc += 4;
1752
                if (CODE64(s) && !havesib) {
1753
                    disp += s->pc + s->rip_offset;
1754
                }
1755
            } else {
1756
                disp = 0;
1757
            }
1758
            break;
1759
        case 1:
1760
            disp = (int8_t)ldub_code(s->pc++);
1761
            break;
1762
        default:
1763
        case 2:
1764
            disp = ldl_code(s->pc);
1765
            s->pc += 4;
1766
            break;
1767
        }
1768

    
1769
        if (base >= 0) {
1770
            /* for correct popl handling with esp */
1771
            if (base == 4 && s->popl_esp_hack)
1772
                disp += s->popl_esp_hack;
1773
#ifdef TARGET_X86_64
1774
            if (s->aflag == 2) {
1775
                gen_op_movq_A0_reg(base);
1776
                if (disp != 0) {
1777
                    gen_op_addq_A0_im(disp);
1778
                }
1779
            } else
1780
#endif
1781
            {
1782
                gen_op_movl_A0_reg(base);
1783
                if (disp != 0)
1784
                    gen_op_addl_A0_im(disp);
1785
            }
1786
        } else {
1787
#ifdef TARGET_X86_64
1788
            if (s->aflag == 2) {
1789
                gen_op_movq_A0_im(disp);
1790
            } else
1791
#endif
1792
            {
1793
                gen_op_movl_A0_im(disp);
1794
            }
1795
        }
1796
        /* XXX: index == 4 is always invalid */
1797
        if (havesib && (index != 4 || scale != 0)) {
1798
#ifdef TARGET_X86_64
1799
            if (s->aflag == 2) {
1800
                gen_op_addq_A0_reg_sN(scale, index);
1801
            } else
1802
#endif
1803
            {
1804
                gen_op_addl_A0_reg_sN(scale, index);
1805
            }
1806
        }
1807
        if (must_add_seg) {
1808
            if (override < 0) {
1809
                if (base == R_EBP || base == R_ESP)
1810
                    override = R_SS;
1811
                else
1812
                    override = R_DS;
1813
            }
1814
#ifdef TARGET_X86_64
1815
            if (s->aflag == 2) {
1816
                gen_op_addq_A0_seg(override);
1817
            } else
1818
#endif
1819
            {
1820
                gen_op_addl_A0_seg(override);
1821
            }
1822
        }
1823
    } else {
1824
        switch (mod) {
1825
        case 0:
1826
            if (rm == 6) {
1827
                disp = lduw_code(s->pc);
1828
                s->pc += 2;
1829
                gen_op_movl_A0_im(disp);
1830
                rm = 0; /* avoid SS override */
1831
                goto no_rm;
1832
            } else {
1833
                disp = 0;
1834
            }
1835
            break;
1836
        case 1:
1837
            disp = (int8_t)ldub_code(s->pc++);
1838
            break;
1839
        default:
1840
        case 2:
1841
            disp = lduw_code(s->pc);
1842
            s->pc += 2;
1843
            break;
1844
        }
1845
        switch(rm) {
1846
        case 0:
1847
            gen_op_movl_A0_reg(R_EBX);
1848
            gen_op_addl_A0_reg_sN(0, R_ESI);
1849
            break;
1850
        case 1:
1851
            gen_op_movl_A0_reg(R_EBX);
1852
            gen_op_addl_A0_reg_sN(0, R_EDI);
1853
            break;
1854
        case 2:
1855
            gen_op_movl_A0_reg(R_EBP);
1856
            gen_op_addl_A0_reg_sN(0, R_ESI);
1857
            break;
1858
        case 3:
1859
            gen_op_movl_A0_reg(R_EBP);
1860
            gen_op_addl_A0_reg_sN(0, R_EDI);
1861
            break;
1862
        case 4:
1863
            gen_op_movl_A0_reg(R_ESI);
1864
            break;
1865
        case 5:
1866
            gen_op_movl_A0_reg(R_EDI);
1867
            break;
1868
        case 6:
1869
            gen_op_movl_A0_reg(R_EBP);
1870
            break;
1871
        default:
1872
        case 7:
1873
            gen_op_movl_A0_reg(R_EBX);
1874
            break;
1875
        }
1876
        if (disp != 0)
1877
            gen_op_addl_A0_im(disp);
1878
        gen_op_andl_A0_ffff();
1879
    no_rm:
1880
        if (must_add_seg) {
1881
            if (override < 0) {
1882
                if (rm == 2 || rm == 3 || rm == 6)
1883
                    override = R_SS;
1884
                else
1885
                    override = R_DS;
1886
            }
1887
            gen_op_addl_A0_seg(override);
1888
        }
1889
    }
1890

    
1891
    opreg = OR_A0;
1892
    disp = 0;
1893
    *reg_ptr = opreg;
1894
    *offset_ptr = disp;
1895
}
1896

    
1897
static void gen_nop_modrm(DisasContext *s, int modrm)
1898
{
1899
    int mod, rm, base, code;
1900

    
1901
    mod = (modrm >> 6) & 3;
1902
    if (mod == 3)
1903
        return;
1904
    rm = modrm & 7;
1905

    
1906
    if (s->aflag) {
1907

    
1908
        base = rm;
1909

    
1910
        if (base == 4) {
1911
            code = ldub_code(s->pc++);
1912
            base = (code & 7);
1913
        }
1914

    
1915
        switch (mod) {
1916
        case 0:
1917
            if (base == 5) {
1918
                s->pc += 4;
1919
            }
1920
            break;
1921
        case 1:
1922
            s->pc++;
1923
            break;
1924
        default:
1925
        case 2:
1926
            s->pc += 4;
1927
            break;
1928
        }
1929
    } else {
1930
        switch (mod) {
1931
        case 0:
1932
            if (rm == 6) {
1933
                s->pc += 2;
1934
            }
1935
            break;
1936
        case 1:
1937
            s->pc++;
1938
            break;
1939
        default:
1940
        case 2:
1941
            s->pc += 2;
1942
            break;
1943
        }
1944
    }
1945
}
1946

    
1947
/* used for LEA and MOV AX, mem */
1948
static void gen_add_A0_ds_seg(DisasContext *s)
1949
{
1950
    int override, must_add_seg;
1951
    must_add_seg = s->addseg;
1952
    override = R_DS;
1953
    if (s->override >= 0) {
1954
        override = s->override;
1955
        must_add_seg = 1;
1956
    } else {
1957
        override = R_DS;
1958
    }
1959
    if (must_add_seg) {
1960
#ifdef TARGET_X86_64
1961
        if (CODE64(s)) {
1962
            gen_op_addq_A0_seg(override);
1963
        } else
1964
#endif
1965
        {
1966
            gen_op_addl_A0_seg(override);
1967
        }
1968
    }
1969
}
1970

    
1971
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1972
   OR_TMP0 */
1973
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1974
{
1975
    int mod, rm, opreg, disp;
1976

    
1977
    mod = (modrm >> 6) & 3;
1978
    rm = (modrm & 7) | REX_B(s);
1979
    if (mod == 3) {
1980
        if (is_store) {
1981
            if (reg != OR_TMP0)
1982
                gen_op_mov_TN_reg(ot, 0, reg);
1983
            gen_op_mov_reg_T0(ot, rm);
1984
        } else {
1985
            gen_op_mov_TN_reg(ot, 0, rm);
1986
            if (reg != OR_TMP0)
1987
                gen_op_mov_reg_T0(ot, reg);
1988
        }
1989
    } else {
1990
        gen_lea_modrm(s, modrm, &opreg, &disp);
1991
        if (is_store) {
1992
            if (reg != OR_TMP0)
1993
                gen_op_mov_TN_reg(ot, 0, reg);
1994
            gen_op_st_T0_A0(ot + s->mem_index);
1995
        } else {
1996
            gen_op_ld_T0_A0(ot + s->mem_index);
1997
            if (reg != OR_TMP0)
1998
                gen_op_mov_reg_T0(ot, reg);
1999
        }
2000
    }
2001
}
2002

    
2003
static inline uint32_t insn_get(DisasContext *s, int ot)
2004
{
2005
    uint32_t ret;
2006

    
2007
    switch(ot) {
2008
    case OT_BYTE:
2009
        ret = ldub_code(s->pc);
2010
        s->pc++;
2011
        break;
2012
    case OT_WORD:
2013
        ret = lduw_code(s->pc);
2014
        s->pc += 2;
2015
        break;
2016
    default:
2017
    case OT_LONG:
2018
        ret = ldl_code(s->pc);
2019
        s->pc += 4;
2020
        break;
2021
    }
2022
    return ret;
2023
}
2024

    
2025
static inline int insn_const_size(unsigned int ot)
2026
{
2027
    if (ot <= OT_LONG)
2028
        return 1 << ot;
2029
    else
2030
        return 4;
2031
}
2032

    
2033
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2034
{
2035
    TranslationBlock *tb;
2036
    target_ulong pc;
2037

    
2038
    pc = s->cs_base + eip;
2039
    tb = s->tb;
2040
    /* NOTE: we handle the case where the TB spans two pages here */
2041
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
2042
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))  {
2043
        /* jump to same page: we can use a direct jump */
2044
        tcg_gen_goto_tb(tb_num);
2045
        gen_jmp_im(eip);
2046
        tcg_gen_exit_tb((long)tb + tb_num);
2047
    } else {
2048
        /* jump to another page: currently not optimized */
2049
        gen_jmp_im(eip);
2050
        gen_eob(s);
2051
    }
2052
}
2053

    
2054
/* Emit a conditional jump for condition encoding 'b': branch to 'val'
   when taken, fall through to 'next_eip' otherwise.  When s->jmp_opt
   is set the two outcomes are emitted as chained TBs via gen_goto_tb();
   otherwise EIP is stored explicitly and the block ends with gen_eob(). */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    TranslationBlock *tb;
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;            /* low bit of the opcode inverts the condition */
    jcc_op = (b >> 1) & 7;  /* base condition (JCC_O .. JCC_LE) */

    if (s->jmp_opt) {
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            /* after a subtract/compare every condition is directly computable */
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            /* for these ops only Z and S can be read straight from CC_DST;
               the modulo 4 extracts the operand-size index of the group */
            switch(jcc_op) {
            case JCC_Z:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        /* make the flag state explicit before any slow-path evaluation */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            /* fall back: compute the condition into T0, then test it */
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        if (inv) {
            /* inverted condition: just swap the two destinations */
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        tb = s->tb;

        l1 = gen_new_label();
        func(l1);

        /* not-taken path: chain slot 0 */
        gen_goto_tb(s, 0, next_eip);

        /* taken path: chain slot 1 */
        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        /* no TB chaining: store the chosen EIP and end the block */
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
2182

    
2183
/* Compute the condition 'b' into T0 (0 or 1), mirroring the fast paths
   of gen_jcc: direct table lookup after sub/cmp, Z/S shortcuts for the
   other arithmetic groups, slow flag evaluation otherwise. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;            /* low bit inverts the result */
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        /* only Z and S can be read straight from CC_DST for these groups;
           modulo 4 yields the operand-size index */
        switch(jcc_op) {
        case JCC_Z:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        /* generic path: materialize the flags, then evaluate the condition */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}
2249

    
2250
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* protected mode: the load may fault, so flags and EIP must be
           in sync before calling the helper */
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        /* real/vm86 mode: plain selector load, no descriptor checks */
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
2273

    
2274
static inline int svm_is_rep(int prefixes)
2275
{
2276
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2277
}
2278

    
2279
/* Emit SVM intercept checks for instruction 'type' with exit-info
   'param'.  Returns 1 when a guaranteed #VMEXIT was emitted (the TB
   was terminated), 0 when execution may continue or SVM is inactive. */
static inline int
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint32_t type, uint64_t param)
{
    if(!(s->flags & (INTERCEPT_SVM_MASK)))
        /* no SVM activated */
        return 0;
    switch(type) {
        /* CRx and DRx reads/writes */
        case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
            }
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_svm_check_intercept_param,
                               tcg_const_i32(type), tcg_const_i64(param));
            /* this is a special case as we do not know if the interception occurs
               so we assume there was none */
            return 0;
        case SVM_EXIT_MSR:
            if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                }
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_svm_check_intercept_param,
                                   tcg_const_i32(type), tcg_const_i64(param));
                /* this is a special case as we do not know if the interception occurs
                   so we assume there was none */
                return 0;
            }
            break;
        default:
            /* all other exits: the intercept bit fully determines the
               outcome at translation time, so a direct vmexit is emitted */
            if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                }
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_vmexit,
                                   tcg_const_i32(type), tcg_const_i64(param));
                /* we can optimize this one so TBs don't get longer
                   than up to vmexit */
                gen_eob(s);
                return 1;
            }
    }
    return 0;
}
2327

    
2328
/* Convenience wrapper: SVM intercept check with a zero exit-info param. */
static inline int
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    return gen_svm_check_intercept_param(s, pc_start, type, 0);
}
2333

    
2334
/* Add 'addend' to ESP using the stack-width appropriate for the
   current mode (64-bit code, 32-bit stack segment, or 16-bit). */
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_addq_ESP_im(addend);
    } else
#endif
    if (s->ss32) {
        gen_op_addl_ESP_im(addend);
    } else {
        gen_op_addw_ESP_im(addend);
    }
}
2347

    
2348
/* generate a push. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            /* 64-bit operand: push 8 bytes */
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            /* 66-prefixed push in long mode: 2 bytes */
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                /* keep the unsegmented ESP value in T1 for the update */
                gen_op_movl_T1_A0();
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        /* write back ESP: A0 already holds it only when no SS base was added */
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
2387

    
2388
/* generate a push. It depends on ss32, addseg and dflag */
2389
/* slower version for T1, only used for call Ev */
2390
static void gen_push_T1(DisasContext *s)
2391
{
2392
#ifdef TARGET_X86_64
2393
    if (CODE64(s)) {
2394
        gen_op_movq_A0_reg(R_ESP);
2395
        if (s->dflag) {
2396
            gen_op_addq_A0_im(-8);
2397
            gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2398
        } else {
2399
            gen_op_addq_A0_im(-2);
2400
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
2401
        }
2402
        gen_op_mov_reg_A0(2, R_ESP);
2403
    } else
2404
#endif
2405
    {
2406
        gen_op_movl_A0_reg(R_ESP);
2407
        if (!s->dflag)
2408
            gen_op_addl_A0_im(-2);
2409
        else
2410
            gen_op_addl_A0_im(-4);
2411
        if (s->ss32) {
2412
            if (s->addseg) {
2413
                gen_op_addl_A0_seg(R_SS);
2414
            }
2415
        } else {
2416
            gen_op_andl_A0_ffff();
2417
            gen_op_addl_A0_seg(R_SS);
2418
        }
2419
        gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2420

    
2421
        if (s->ss32 && !s->addseg)
2422
            gen_op_mov_reg_A0(1, R_ESP);
2423
        else
2424
            gen_stack_update(s, (-2) << s->dflag);
2425
    }
2426
}
2427

    
2428
/* two step pop is necessary for precise exceptions */
/* Load the stack top into T0; ESP is only adjusted afterwards by
   gen_pop_update() so a faulting load leaves ESP unchanged. */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
    }
}
2449

    
2450
/* Second half of a pop: bump ESP by the operand size (8 in 64-bit
   mode with a 64-bit operand, otherwise 2 or 4). */
static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}
2461

    
2462
/* Compute the current stack address into A0 (ESP, 16-bit masked if
   needed, plus SS base when addseg); T1 keeps the unsegmented offset. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
2471

    
2472
/* NOTE: wrap around in 16 bit not fully handled */
/* PUSHA/PUSHAD: store the eight GPRs (EAX..EDI order, i.e. 7-i) below
   ESP, then write the pre-decremented stack pointer back from T1. */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-16 <<  s->dflag);   /* 8 slots of 2 or 4 bytes */
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
2490

    
2491
/* NOTE: wrap around in 16 bit not fully handled */
/* POPA/POPAD: reload the eight GPRs; the popped ESP slot (i == 3) is
   skipped per the architecture, and ESP is set from T1 at the end. */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    gen_op_addl_T1_im(16 <<  s->dflag);   /* final ESP after 8 pops */
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
2512

    
2513
/* ENTER: push EBP, optionally copy 'level' frame pointers via a
   helper, set EBP to the new frame and reserve 'esp_addend' bytes. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;   /* architectural: nesting level is taken mod 32 */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();   /* T1 = new frame pointer value */

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter64_level,
                               tcg_const_i32(level),
                               tcg_const_i32((ot == OT_QUAD)),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        gen_op_movl_T1_A0();   /* T1 = new frame pointer value */
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter_level,
                               tcg_const_i32(level),
                               tcg_const_i32(s->dflag),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
2568

    
2569
/* Raise exception 'trapno' at 'cur_eip': flags and EIP are made
   consistent first, then the TB is terminated. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
    s->is_jmp = 3;
}
2577

    
2578
/* an interrupt is different from an exception because of the
   privilege checks */
/* Raise software interrupt 'intno'; the helper also gets the
   instruction length so it can push the correct return EIP. */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_2(helper_raise_interrupt,
                       tcg_const_i32(intno),
                       tcg_const_i32(next_eip - cur_eip));
    s->is_jmp = 3;
}
2591

    
2592
/* Stop translation and drop to the debugger at 'cur_eip'. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_0(helper_debug);
    s->is_jmp = 3;
}
2600

    
2601
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        /* clear the one-instruction interrupt shadow (MOV SS / STI) */
        tcg_gen_helper_0_0(helper_reset_inhibit_irq);
    }
    if (s->singlestep_enabled) {
        tcg_gen_helper_0_0(helper_debug);
    } else if (s->tf) {
        /* TF set: report a single-step trap instead of chaining */
        tcg_gen_helper_0_0(helper_single_step);
    } else {
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = 3;
}
2619

    
2620
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        /* flags must be resolved before the TB can be chained */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2636

    
2637
/* Unconditional jump using TB chain slot 0. */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2641

    
2642
/* Load a 64-bit value from guest address A0 into CPU state at 'offset'.
   'idx' encodes the memory index (as passed around as s->mem_index). */
static inline void gen_ldq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
}
2648

    
2649
/* Store a 64-bit value from CPU state at 'offset' to guest address A0. */
static inline void gen_stq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
}
2655

    
2656
/* Load a 128-bit (octa) value from guest address A0 into the XMM
   register at 'offset', as two 64-bit halves. */
static inline void gen_ldo_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
}
2665

    
2666
/* Store a 128-bit (octa) value from the XMM register at 'offset' to
   guest address A0, as two 64-bit halves. */
static inline void gen_sto_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
}
2675

    
2676
/* Copy a 128-bit value between two CPU-state offsets (two 64-bit moves). */
static inline void gen_op_movo(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
}
2683

    
2684
/* Copy a 64-bit value between two CPU-state offsets. */
static inline void gen_op_movq(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
2689

    
2690
/* Copy a 32-bit value between two CPU-state offsets. */
static inline void gen_op_movl(int d_offset, int s_offset)
{
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
}
2695

    
2696
/* Zero the 64-bit CPU-state field at 'd_offset'. */
static inline void gen_op_movq_env_0(int d_offset)
{
    tcg_gen_movi_i64(cpu_tmp1_i64, 0);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
2701

    
2702
/* Sentinel table entries: SSE_SPECIAL marks opcodes decoded by hand in
   gen_sse(), SSE_DUMMY marks opcodes handled before table dispatch. */
#define SSE_SPECIAL ((void *)1)
#define SSE_DUMMY ((void *)2)

/* Helper pairs/quads indexed by prefix: { none, 66 } for MMX/SSE2
   integer ops; { none, 66, F3, F2 } for float ops (ps, pd, ss, sd). */
#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
                     helper_ ## x ## ss, helper_ ## x ## sd, }
2708

    
2709
/* Dispatch table for 0F-prefixed SSE/MMX opcodes, indexed by opcode
   byte then by mandatory prefix (0: none, 1: 66, 2: F3, 3: F2). */
static void *sse_op_table1[256][4] = {
    /* 3DNow! extensions */
    [0x0e] = { SSE_DUMMY }, /* femms */
    [0x0f] = { SSE_DUMMY }, /* pf... */
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
    [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttss2si, cvttsd2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtss2si, cvtsd2si */
    [0x2e] = { helper_ucomiss, helper_ucomisd },
    [0x2f] = { helper_comiss, helper_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
    [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
    [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
    [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
    [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
               helper_cvtss2sd, helper_cvtsd2ss },
    [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { helper_shufps, helper_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, helper_punpcklqdq_xmm },
    [0x6d] = { NULL, helper_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0x70] = { helper_pshufw_mmx,
               helper_pshufd_xmm,
               helper_pshufhw_xmm,
               helper_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
    [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2834

    
2835
/* Immediate-count shift group (0F 71/72/73): indexed by
   8 * size-group (word/dword/qword) + modrm /reg field,
   then by prefix ({ none = MMX, 66 = SSE }). */
static void *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, helper_psrldq_xmm },   /* 128-bit only */
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, helper_pslldq_xmm },   /* 128-bit only */
};
2847

    
2848
/* Scalar int<->float conversions, three groups of four
   ({ss, sd} x {32-bit, 64-bit source/dest}); the 64-bit variants
   exist only on x86_64 builds. */
static void *sse_op_table3[4 * 3] = {
    /* cvtsi2ss/sd: integer -> float */
    helper_cvtsi2ss,
    helper_cvtsi2sd,
    X86_64_ONLY(helper_cvtsq2ss),
    X86_64_ONLY(helper_cvtsq2sd),

    /* cvttss/sd2si: float -> integer, truncating */
    helper_cvttss2si,
    helper_cvttsd2si,
    X86_64_ONLY(helper_cvttss2sq),
    X86_64_ONLY(helper_cvttsd2sq),

    /* cvtss/sd2si: float -> integer, current rounding mode */
    helper_cvtss2si,
    helper_cvtsd2si,
    X86_64_ONLY(helper_cvtss2sq),
    X86_64_ONLY(helper_cvtsd2sq),
};
2864

    
2865
/* CMPPS/CMPPD/CMPSS/CMPSD (0F C2): indexed by the 3-bit immediate
   predicate, then by prefix (ps, pd, ss, sd). */
static void *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2875

    
2876
/* 3DNow! operations, indexed by the instruction's trailing opcode byte. */
static void *sse_op_table5[256] = {
    [0x0c] = helper_pi2fw,
    [0x0d] = helper_pi2fd,
    [0x1c] = helper_pf2iw,
    [0x1d] = helper_pf2id,
    [0x8a] = helper_pfnacc,
    [0x8e] = helper_pfpnacc,
    [0x90] = helper_pfcmpge,
    [0x94] = helper_pfmin,
    [0x96] = helper_pfrcp,
    [0x97] = helper_pfrsqrt,
    [0x9a] = helper_pfsub,
    [0x9e] = helper_pfadd,
    [0xa0] = helper_pfcmpgt,
    [0xa4] = helper_pfmax,
    [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = helper_movq, /* pfrsqit1 */
    [0xaa] = helper_pfsubr,
    [0xae] = helper_pfacc,
    [0xb0] = helper_pfcmpeq,
    [0xb4] = helper_pfmul,
    [0xb6] = helper_movq, /* pfrcpit2 */
    [0xb7] = helper_pmulhrw_mmx,
    [0xbb] = helper_pswapd,
    [0xbf] = helper_pavgb_mmx /* pavgusb */
};
2902

    
2903
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2904
{
2905
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
2906
    int modrm, mod, rm, reg, reg_addr, offset_addr;
2907
    void *sse_op2;
2908

    
2909
    b &= 0xff;
2910
    if (s->prefix & PREFIX_DATA)
2911
        b1 = 1;
2912
    else if (s->prefix & PREFIX_REPZ)
2913
        b1 = 2;
2914
    else if (s->prefix & PREFIX_REPNZ)
2915
        b1 = 3;
2916
    else
2917
        b1 = 0;
2918
    sse_op2 = sse_op_table1[b][b1];
2919
    if (!sse_op2)
2920
        goto illegal_op;
2921
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
2922
        is_xmm = 1;
2923
    } else {
2924
        if (b1 == 0) {
2925
            /* MMX case */
2926
            is_xmm = 0;
2927
        } else {
2928
            is_xmm = 1;
2929
        }
2930
    }
2931
    /* simple MMX/SSE operation */
2932
    if (s->flags & HF_TS_MASK) {
2933
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2934
        return;
2935
    }
2936
    if (s->flags & HF_EM_MASK) {
2937
    illegal_op:
2938
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2939
        return;
2940
    }
2941
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2942
        goto illegal_op;
2943
    if (b == 0x0e) {
2944
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
2945
            goto illegal_op;
2946
        /* femms */
2947
        tcg_gen_helper_0_0(helper_emms);
2948
        return;
2949
    }
2950
    if (b == 0x77) {
2951
        /* emms */
2952
        tcg_gen_helper_0_0(helper_emms);
2953
        return;
2954
    }
2955
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2956
       the static cpu state) */
2957
    if (!is_xmm) {
2958
        tcg_gen_helper_0_0(helper_enter_mmx);
2959
    }
2960

    
2961
    modrm = ldub_code(s->pc++);
2962
    reg = ((modrm >> 3) & 7);
2963
    if (is_xmm)
2964
        reg |= rex_r;
2965
    mod = (modrm >> 6) & 3;
2966
    if (sse_op2 == SSE_SPECIAL) {
2967
        b |= (b1 << 8);
2968
        switch(b) {
2969
        case 0x0e7: /* movntq */
2970
            if (mod == 3)
2971
                goto illegal_op;
2972
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2973
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2974
            break;
2975
        case 0x1e7: /* movntdq */
2976
        case 0x02b: /* movntps */
2977
        case 0x12b: /* movntps */
2978
        case 0x3f0: /* lddqu */
2979
            if (mod == 3)
2980
                goto illegal_op;
2981
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2982
            gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2983
            break;
2984
        case 0x6e: /* movd mm, ea */
2985
#ifdef TARGET_X86_64
2986
            if (s->dflag == 2) {
2987
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2988
                tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
2989
            } else
2990
#endif
2991
            {
2992
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2993
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
2994
                                 offsetof(CPUX86State,fpregs[reg].mmx));
2995
                tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
2996
            }
2997
            break;
2998
        case 0x16e: /* movd xmm, ea */
2999
#ifdef TARGET_X86_64
3000
            if (s->dflag == 2) {
3001
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3002
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
3003
                                 offsetof(CPUX86State,xmm_regs[reg]));
3004
                tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
3005
            } else
3006
#endif
3007
            {
3008
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3009
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
3010
                                 offsetof(CPUX86State,xmm_regs[reg]));
3011
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3012
                tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
3013
            }
3014
            break;
3015
        case 0x6f: /* movq mm, ea */
3016
            if (mod != 3) {
3017
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3018
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3019
            } else {
3020
                rm = (modrm & 7);
3021
                tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3022
                               offsetof(CPUX86State,fpregs[rm].mmx));
3023
                tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3024
                               offsetof(CPUX86State,fpregs[reg].mmx));
3025
            }
3026
            break;
3027
        case 0x010: /* movups */
3028
        case 0x110: /* movupd */
3029
        case 0x028: /* movaps */
3030
        case 0x128: /* movapd */
3031
        case 0x16f: /* movdqa xmm, ea */
3032
        case 0x26f: /* movdqu xmm, ea */
3033
            if (mod != 3) {
3034
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3035
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3036
            } else {
3037
                rm = (modrm & 7) | REX_B(s);
3038
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3039
                            offsetof(CPUX86State,xmm_regs[rm]));
3040
            }
3041
            break;
3042
        case 0x210: /* movss xmm, ea */
3043
            if (mod != 3) {
3044
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3045
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3046
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3047
                gen_op_movl_T0_0();
3048
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3049
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3050
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3051
            } else {
3052
                rm = (modrm & 7) | REX_B(s);
3053
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3054
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3055
            }
3056
            break;
3057
        case 0x310: /* movsd xmm, ea */
3058
            if (mod != 3) {
3059
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3060
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3061
                gen_op_movl_T0_0();
3062
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3063
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3064
            } else {
3065
                rm = (modrm & 7) | REX_B(s);
3066
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3067
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3068
            }
3069
            break;
3070
        case 0x012: /* movlps */
3071
        case 0x112: /* movlpd */
3072
            if (mod != 3) {
3073
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3074
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3075
            } else {
3076
                /* movhlps */
3077
                rm = (modrm & 7) | REX_B(s);
3078
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3079
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3080
            }
3081
            break;
3082
        case 0x212: /* movsldup */
3083
            if (mod != 3) {
3084
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3085
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3086
            } else {
3087
                rm = (modrm & 7) | REX_B(s);
3088
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3089
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3090
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3091
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3092
            }
3093
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3094
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3095
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3096
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3097
            break;
3098
        case 0x312: /* movddup */
3099
            if (mod != 3) {
3100
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3101
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3102
            } else {
3103
                rm = (modrm & 7) | REX_B(s);
3104
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3105
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3106
            }
3107
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3108
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3109
            break;
3110
        case 0x016: /* movhps */
3111
        case 0x116: /* movhpd */
3112
            if (mod != 3) {
3113
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3114
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3115
            } else {
3116
                /* movlhps */
3117
                rm = (modrm & 7) | REX_B(s);
3118
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3119
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3120
            }
3121
            break;
3122
        case 0x216: /* movshdup */
3123
            if (mod != 3) {
3124
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3125
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3126
            } else {
3127
                rm = (modrm & 7) | REX_B(s);
3128
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3129
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3130
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3131
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3132
            }
3133
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3134
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3135
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3136
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3137
            break;
3138
        case 0x7e: /* movd ea, mm */
3139
#ifdef TARGET_X86_64
3140
            if (s->dflag == 2) {
3141
                tcg_gen_ld_i64(cpu_T[0], cpu_env, 
3142
                               offsetof(CPUX86State,fpregs[reg].mmx));
3143
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3144
            } else
3145
#endif
3146
            {
3147
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, 
3148
                                 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3149
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3150
            }
3151
            break;
3152
        case 0x17e: /* movd ea, xmm */
3153
#ifdef TARGET_X86_64
3154
            if (s->dflag == 2) {
3155
                tcg_gen_ld_i64(cpu_T[0], cpu_env, 
3156
                               offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3157
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3158
            } else
3159
#endif
3160
            {
3161
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, 
3162
                                 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3163
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3164
            }
3165
            break;
3166
        case 0x27e: /* movq xmm, ea */
3167
            if (mod != 3) {
3168
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3169
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3170
            } else {
3171
                rm = (modrm & 7) | REX_B(s);
3172
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3173
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3174
            }
3175
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3176
            break;
3177
        case 0x7f: /* movq ea, mm */
3178
            if (mod != 3) {
3179
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3180
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3181
            } else {
3182
                rm = (modrm & 7);
3183
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3184
                            offsetof(CPUX86State,fpregs[reg].mmx));
3185
            }
3186
            break;
3187
        case 0x011: /* movups */
3188
        case 0x111: /* movupd */
3189
        case 0x029: /* movaps */
3190
        case 0x129: /* movapd */
3191
        case 0x17f: /* movdqa ea, xmm */
3192
        case 0x27f: /* movdqu ea, xmm */
3193
            if (mod != 3) {
3194
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3195
                gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3196
            } else {
3197
                rm = (modrm & 7) | REX_B(s);
3198
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3199
                            offsetof(CPUX86State,xmm_regs[reg]));
3200
            }
3201
            break;
3202
        case 0x211: /* movss ea, xmm */
3203
            if (mod != 3) {
3204
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3205
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3206
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
3207
            } else {
3208
                rm = (modrm & 7) | REX_B(s);
3209
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3210
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3211
            }
3212
            break;
3213
        case 0x311: /* movsd ea, xmm */
3214
            if (mod != 3) {
3215
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3216
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3217
            } else {
3218
                rm = (modrm & 7) | REX_B(s);
3219
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3220
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3221
            }
3222
            break;
3223
        case 0x013: /* movlps */
3224
        case 0x113: /* movlpd */
3225
            if (mod != 3) {
3226
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3227
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3228
            } else {
3229
                goto illegal_op;
3230
            }
3231
            break;
3232
        case 0x017: /* movhps */
3233
        case 0x117: /* movhpd */
3234
            if (mod != 3) {
3235
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3236
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3237
            } else {
3238
                goto illegal_op;
3239
            }
3240
            break;
3241
        case 0x71: /* shift mm, im */
3242
        case 0x72:
3243
        case 0x73:
3244
        case 0x171: /* shift xmm, im */
3245
        case 0x172:
3246
        case 0x173:
3247
            val = ldub_code(s->pc++);
3248
            if (is_xmm) {
3249
                gen_op_movl_T0_im(val);
3250
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3251
                gen_op_movl_T0_0();
3252
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3253
                op1_offset = offsetof(CPUX86State,xmm_t0);
3254
            } else {
3255
                gen_op_movl_T0_im(val);
3256
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3257
                gen_op_movl_T0_0();
3258
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3259
                op1_offset = offsetof(CPUX86State,mmx_t0);
3260
            }
3261
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
3262
            if (!sse_op2)
3263
                goto illegal_op;
3264
            if (is_xmm) {
3265
                rm = (modrm & 7) | REX_B(s);
3266
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3267
            } else {
3268
                rm = (modrm & 7);
3269
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3270
            }
3271
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3272
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3273
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3274
            break;
3275
        case 0x050: /* movmskps */
3276
            rm = (modrm & 7) | REX_B(s);
3277
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
3278
                             offsetof(CPUX86State,xmm_regs[rm]));
3279
            tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3280
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3281
            gen_op_mov_reg_T0(OT_LONG, reg);
3282
            break;
3283
        case 0x150: /* movmskpd */
3284
            rm = (modrm & 7) | REX_B(s);
3285
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
3286
                             offsetof(CPUX86State,xmm_regs[rm]));
3287
            tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3288
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3289
            gen_op_mov_reg_T0(OT_LONG, reg);
3290
            break;
3291
        case 0x02a: /* cvtpi2ps */
3292
        case 0x12a: /* cvtpi2pd */
3293
            tcg_gen_helper_0_0(helper_enter_mmx);
3294
            if (mod != 3) {
3295
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3296
                op2_offset = offsetof(CPUX86State,mmx_t0);
3297
                gen_ldq_env_A0(s->mem_index, op2_offset);
3298
            } else {
3299
                rm = (modrm & 7);
3300
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3301
            }
3302
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3303
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3304
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3305
            switch(b >> 8) {
3306
            case 0x0:
3307
                tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3308
                break;
3309
            default:
3310
            case 0x1:
3311
                tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3312
                break;
3313
            }
3314
            break;
3315
        case 0x22a: /* cvtsi2ss */
3316
        case 0x32a: /* cvtsi2sd */
3317
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3318
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3319
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3320
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3321
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3322
            tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3323
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
3324
            break;
3325
        case 0x02c: /* cvttps2pi */
3326
        case 0x12c: /* cvttpd2pi */
3327
        case 0x02d: /* cvtps2pi */
3328
        case 0x12d: /* cvtpd2pi */
3329
            tcg_gen_helper_0_0(helper_enter_mmx);
3330
            if (mod != 3) {
3331
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3332
                op2_offset = offsetof(CPUX86State,xmm_t0);
3333
                gen_ldo_env_A0(s->mem_index, op2_offset);
3334
            } else {
3335
                rm = (modrm & 7) | REX_B(s);
3336
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3337
            }
3338
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3339
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3340
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3341
            switch(b) {
3342
            case 0x02c:
3343
                tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3344
                break;
3345
            case 0x12c:
3346
                tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3347
                break;
3348
            case 0x02d:
3349
                tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3350
                break;
3351
            case 0x12d:
3352
                tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3353
                break;
3354
            }
3355
            break;
3356
        case 0x22c: /* cvttss2si */
3357
        case 0x32c: /* cvttsd2si */
3358
        case 0x22d: /* cvtss2si */
3359
        case 0x32d: /* cvtsd2si */
3360
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3361
            if (mod != 3) {
3362
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3363
                if ((b >> 8) & 1) {
3364
                    gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3365
                } else {
3366
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3367
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3368
                }
3369
                op2_offset = offsetof(CPUX86State,xmm_t0);
3370
            } else {
3371
                rm = (modrm & 7) | REX_B(s);
3372
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3373
            }
3374
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3375
                                    (b & 1) * 4];
3376
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3377
            if (ot == OT_LONG) {
3378
                tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
3379
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3380
            } else {
3381
                tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
3382
            }
3383
            gen_op_mov_reg_T0(ot, reg);
3384
            break;
3385
        case 0xc4: /* pinsrw */
3386
        case 0x1c4:
3387
            s->rip_offset = 1;
3388
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3389
            val = ldub_code(s->pc++);
3390
            if (b1) {
3391
                val &= 7;
3392
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
3393
                                offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3394
            } else {
3395
                val &= 3;
3396
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
3397
                                offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3398
            }
3399
            break;
3400
        case 0xc5: /* pextrw */
3401
        case 0x1c5:
3402
            if (mod != 3)
3403
                goto illegal_op;
3404
            val = ldub_code(s->pc++);
3405
            if (b1) {
3406
                val &= 7;
3407
                rm = (modrm & 7) | REX_B(s);
3408
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3409
                                 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3410
            } else {
3411
                val &= 3;
3412
                rm = (modrm & 7);
3413
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3414
                                offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3415
            }
3416
            reg = ((modrm >> 3) & 7) | rex_r;
3417
            gen_op_mov_reg_T0(OT_LONG, reg);
3418
            break;
3419
        case 0x1d6: /* movq ea, xmm */
3420
            if (mod != 3) {
3421
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3422
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3423
            } else {
3424
                rm = (modrm & 7) | REX_B(s);
3425
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3426
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3427
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3428
            }
3429
            break;
3430
        case 0x2d6: /* movq2dq */
3431
            tcg_gen_helper_0_0(helper_enter_mmx);
3432
            rm = (modrm & 7);
3433
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3434
                        offsetof(CPUX86State,fpregs[rm].mmx));
3435
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3436
            break;
3437
        case 0x3d6: /* movdq2q */
3438
            tcg_gen_helper_0_0(helper_enter_mmx);
3439
            rm = (modrm & 7) | REX_B(s);
3440
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3441
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3442
            break;
3443
        case 0xd7: /* pmovmskb */
3444
        case 0x1d7:
3445
            if (mod != 3)
3446
                goto illegal_op;
3447
            if (b1) {
3448
                rm = (modrm & 7) | REX_B(s);
3449
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
3450
                tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
3451
            } else {
3452
                rm = (modrm & 7);
3453
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
3454
                tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
3455
            }
3456
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3457
            reg = ((modrm >> 3) & 7) | rex_r;
3458
            gen_op_mov_reg_T0(OT_LONG, reg);
3459
            break;
3460
        default:
3461
            goto illegal_op;
3462
        }
3463
    } else {
3464
        /* generic MMX or SSE operation */
3465
        switch(b) {
3466
        case 0x70: /* pshufx insn */
3467
        case 0xc6: /* pshufx insn */
3468
        case 0xc2: /* compare insns */
3469
            s->rip_offset = 1;
3470
            break;
3471
        default:
3472
            break;
3473
        }
3474
        if (is_xmm) {
3475
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3476
            if (mod != 3) {
3477
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3478
                op2_offset = offsetof(CPUX86State,xmm_t0);
3479
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3480
                                b == 0xc2)) {
3481
                    /* specific case for SSE single instructions */
3482
                    if (b1 == 2) {
3483
                        /* 32 bit access */
3484
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3485
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3486
                    } else {
3487
                        /* 64 bit access */
3488
                        gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3489
                    }
3490
                } else {
3491
                    gen_ldo_env_A0(s->mem_index, op2_offset);
3492
                }
3493
            } else {
3494
                rm = (modrm & 7) | REX_B(s);
3495
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3496
            }
3497
        } else {
3498
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3499
            if (mod != 3) {
3500
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3501
                op2_offset = offsetof(CPUX86State,mmx_t0);
3502
                gen_ldq_env_A0(s->mem_index, op2_offset);
3503
            } else {
3504
                rm = (modrm & 7);
3505
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3506
            }
3507
        }
3508
        switch(b) {
3509
        case 0x0f: /* 3DNow! data insns */
3510
            if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3511
                goto illegal_op;
3512
            val = ldub_code(s->pc++);
3513
            sse_op2 = sse_op_table5[val];
3514
            if (!sse_op2)
3515
                goto illegal_op;
3516
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3517
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3518
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3519
            break;
3520
        case 0x70: /* pshufx insn */
3521
        case 0xc6: /* pshufx insn */
3522
            val = ldub_code(s->pc++);
3523
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3524
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3525
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3526
            break;
3527
        case 0xc2:
3528
            /* compare insns */
3529
            val = ldub_code(s->pc++);
3530
            if (val >= 8)
3531
                goto illegal_op;
3532
            sse_op2 = sse_op_table4[val][b1];
3533
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3534
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3535
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3536
            break;
3537
        case 0xf7:
3538
            /* maskmov : we must prepare A0 */
3539
            if (mod != 3)
3540
                goto illegal_op;
3541
#ifdef TARGET_X86_64
3542
            if (s->aflag == 2) {
3543
                gen_op_movq_A0_reg(R_EDI);
3544
            } else
3545
#endif
3546
            {
3547
                gen_op_movl_A0_reg(R_EDI);
3548
                if (s->aflag == 0)
3549
                    gen_op_andl_A0_ffff();
3550
            }
3551
            gen_add_A0_ds_seg(s);
3552

    
3553
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3554
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3555
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
3556
            break;
3557
        default:
3558
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3559
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3560
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3561
            break;
3562
        }
3563
        if (b == 0x2e || b == 0x2f) {
3564
            /* just to keep the EFLAGS optimization correct */
3565
            gen_op_com_dummy();
3566
            s->cc_op = CC_OP_EFLAGS;
3567
        }
3568
    }
3569
}
3570

    
3571
/* convert one instruction. s->is_jmp is set if the translation must
3572
   be stopped. Return the next pc value */
3573
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3574
{
3575
    int b, prefixes, aflag, dflag;
3576
    int shift, ot;
3577
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3578
    target_ulong next_eip, tval;
3579
    int rex_w, rex_r;
3580

    
3581
    s->pc = pc_start;
3582
    prefixes = 0;
3583
    aflag = s->code32;
3584
    dflag = s->code32;
3585
    s->override = -1;
3586
    rex_w = -1;
3587
    rex_r = 0;
3588
#ifdef TARGET_X86_64
3589
    s->rex_x = 0;
3590
    s->rex_b = 0;
3591
    x86_64_hregs = 0;
3592
#endif
3593
    s->rip_offset = 0; /* for relative ip address */
3594
 next_byte:
3595
    b = ldub_code(s->pc);
3596
    s->pc++;
3597
    /* check prefixes */
3598
#ifdef TARGET_X86_64
3599
    if (CODE64(s)) {
3600
        switch (b) {
3601
        case 0xf3:
3602
            prefixes |= PREFIX_REPZ;
3603
            goto next_byte;
3604
        case 0xf2:
3605
            prefixes |= PREFIX_REPNZ;
3606
            goto next_byte;
3607
        case 0xf0:
3608
            prefixes |= PREFIX_LOCK;
3609
            goto next_byte;
3610
        case 0x2e:
3611
            s->override = R_CS;
3612
            goto next_byte;
3613
        case 0x36:
3614
            s->override = R_SS;
3615
            goto next_byte;
3616
        case 0x3e:
3617
            s->override = R_DS;
3618
            goto next_byte;
3619
        case 0x26:
3620
            s->override = R_ES;
3621
            goto next_byte;
3622
        case 0x64:
3623
            s->override = R_FS;
3624
            goto next_byte;
3625
        case 0x65:
3626
            s->override = R_GS;
3627
            goto next_byte;
3628
        case 0x66:
3629
            prefixes |= PREFIX_DATA;
3630
            goto next_byte;
3631
        case 0x67:
3632
            prefixes |= PREFIX_ADR;
3633
            goto next_byte;
3634
        case 0x40 ... 0x4f:
3635
            /* REX prefix */
3636
            rex_w = (b >> 3) & 1;
3637
            rex_r = (b & 0x4) << 1;
3638
            s->rex_x = (b & 0x2) << 2;
3639
            REX_B(s) = (b & 0x1) << 3;
3640
            x86_64_hregs = 1; /* select uniform byte register addressing */
3641
            goto next_byte;
3642
        }
3643
        if (rex_w == 1) {
3644
            /* 0x66 is ignored if rex.w is set */
3645
            dflag = 2;
3646
        } else {
3647
            if (prefixes & PREFIX_DATA)
3648
                dflag ^= 1;
3649
        }
3650
        if (!(prefixes & PREFIX_ADR))
3651
            aflag = 2;
3652
    } else
3653
#endif
3654
    {
3655
        switch (b) {
3656
        case 0xf3:
3657
            prefixes |= PREFIX_REPZ;
3658
            goto next_byte;
3659
        case 0xf2:
3660
            prefixes |= PREFIX_REPNZ;
3661
            goto next_byte;
3662
        case 0xf0:
3663
            prefixes |= PREFIX_LOCK;
3664
            goto next_byte;
3665
        case 0x2e:
3666
            s->override = R_CS;
3667
            goto next_byte;
3668
        case 0x36:
3669
            s->override = R_SS;
3670
            goto next_byte;
3671
        case 0x3e:
3672
            s->override = R_DS;
3673
            goto next_byte;
3674
        case 0x26:
3675
            s->override = R_ES;
3676
            goto next_byte;
3677
        case 0x64:
3678
            s->override = R_FS;
3679
            goto next_byte;
3680
        case 0x65:
3681
            s->override = R_GS;
3682
            goto next_byte;
3683
        case 0x66:
3684
            prefixes |= PREFIX_DATA;
3685
            goto next_byte;
3686
        case 0x67:
3687
            prefixes |= PREFIX_ADR;
3688
            goto next_byte;
3689
        }
3690
        if (prefixes & PREFIX_DATA)
3691
            dflag ^= 1;
3692
        if (prefixes & PREFIX_ADR)
3693
            aflag ^= 1;
3694
    }
3695

    
3696
    s->prefix = prefixes;
3697
    s->aflag = aflag;
3698
    s->dflag = dflag;
3699

    
3700
    /* lock generation */
3701
    if (prefixes & PREFIX_LOCK)
3702
        tcg_gen_helper_0_0(helper_lock);
3703

    
3704
    /* now check op code */
3705
 reswitch:
3706
    switch(b) {
3707
    case 0x0f:
3708
        /**************************/
3709
        /* extended op code */
3710
        b = ldub_code(s->pc++) | 0x100;
3711
        goto reswitch;
3712

    
3713
        /**************************/
3714
        /* arith & logic */
3715
    case 0x00 ... 0x05:
3716
    case 0x08 ... 0x0d:
3717
    case 0x10 ... 0x15:
3718
    case 0x18 ... 0x1d:
3719
    case 0x20 ... 0x25:
3720
    case 0x28 ... 0x2d:
3721
    case 0x30 ... 0x35:
3722
    case 0x38 ... 0x3d:
3723
        {
3724
            int op, f, val;
3725
            op = (b >> 3) & 7;
3726
            f = (b >> 1) & 3;
3727

    
3728
            if ((b & 1) == 0)
3729
                ot = OT_BYTE;
3730
            else
3731
                ot = dflag + OT_WORD;
3732

    
3733
            switch(f) {
3734
            case 0: /* OP Ev, Gv */
3735
                modrm = ldub_code(s->pc++);
3736
                reg = ((modrm >> 3) & 7) | rex_r;
3737
                mod = (modrm >> 6) & 3;
3738
                rm = (modrm & 7) | REX_B(s);
3739
                if (mod != 3) {
3740
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3741
                    opreg = OR_TMP0;
3742
                } else if (op == OP_XORL && rm == reg) {
3743
                xor_zero:
3744
                    /* xor reg, reg optimisation */
3745
                    gen_op_movl_T0_0();
3746
                    s->cc_op = CC_OP_LOGICB + ot;
3747
                    gen_op_mov_reg_T0(ot, reg);
3748
                    gen_op_update1_cc();
3749
                    break;
3750
                } else {
3751
                    opreg = rm;
3752
                }
3753
                gen_op_mov_TN_reg(ot, 1, reg);
3754
                gen_op(s, op, ot, opreg);
3755
                break;
3756
            case 1: /* OP Gv, Ev */
3757
                modrm = ldub_code(s->pc++);
3758
                mod = (modrm >> 6) & 3;
3759
                reg = ((modrm >> 3) & 7) | rex_r;
3760
                rm = (modrm & 7) | REX_B(s);
3761
                if (mod != 3) {
3762
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3763
                    gen_op_ld_T1_A0(ot + s->mem_index);
3764
                } else if (op == OP_XORL && rm == reg) {
3765
                    goto xor_zero;
3766
                } else {
3767
                    gen_op_mov_TN_reg(ot, 1, rm);
3768
                }
3769
                gen_op(s, op, ot, reg);
3770
                break;
3771
            case 2: /* OP A, Iv */
3772
                val = insn_get(s, ot);
3773
                gen_op_movl_T1_im(val);
3774
                gen_op(s, op, ot, OR_EAX);
3775
                break;
3776
            }
3777
        }
3778
        break;
3779

    
3780
    case 0x80: /* GRP1 */
3781
    case 0x81:
3782
    case 0x82:
3783
    case 0x83:
3784
        {
3785
            int val;
3786

    
3787
            if ((b & 1) == 0)
3788
                ot = OT_BYTE;
3789
            else
3790
                ot = dflag + OT_WORD;
3791

    
3792
            modrm = ldub_code(s->pc++);
3793
            mod = (modrm >> 6) & 3;
3794
            rm = (modrm & 7) | REX_B(s);
3795
            op = (modrm >> 3) & 7;
3796

    
3797
            if (mod != 3) {
3798
                if (b == 0x83)
3799
                    s->rip_offset = 1;
3800
                else
3801
                    s->rip_offset = insn_const_size(ot);
3802
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3803
                opreg = OR_TMP0;
3804
            } else {
3805
                opreg = rm;
3806
            }
3807

    
3808
            switch(b) {
3809
            default:
3810
            case 0x80:
3811
            case 0x81:
3812
            case 0x82:
3813
                val = insn_get(s, ot);
3814
                break;
3815
            case 0x83:
3816
                val = (int8_t)insn_get(s, OT_BYTE);
3817
                break;
3818
            }
3819
            gen_op_movl_T1_im(val);
3820
            gen_op(s, op, ot, opreg);
3821
        }
3822
        break;
3823

    
3824
        /**************************/
3825
        /* inc, dec, and other misc arith */
3826
    case 0x40 ... 0x47: /* inc Gv */
3827
        ot = dflag ? OT_LONG : OT_WORD;
3828
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
3829
        break;
3830
    case 0x48 ... 0x4f: /* dec Gv */
3831
        ot = dflag ? OT_LONG : OT_WORD;
3832
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
3833
        break;
3834
    case 0xf6: /* GRP3 */
3835
    case 0xf7:
3836
        if ((b & 1) == 0)
3837
            ot = OT_BYTE;
3838
        else
3839
            ot = dflag + OT_WORD;
3840

    
3841
        modrm = ldub_code(s->pc++);
3842
        mod = (modrm >> 6) & 3;
3843
        rm = (modrm & 7) | REX_B(s);
3844
        op = (modrm >> 3) & 7;
3845
        if (mod != 3) {
3846
            if (op == 0)
3847
                s->rip_offset = insn_const_size(ot);
3848
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3849
            gen_op_ld_T0_A0(ot + s->mem_index);
3850
        } else {
3851
            gen_op_mov_TN_reg(ot, 0, rm);
3852
        }
3853

    
3854
        switch(op) {
3855
        case 0: /* test */
3856
            val = insn_get(s, ot);
3857
            gen_op_movl_T1_im(val);
3858
            gen_op_testl_T0_T1_cc();
3859
            s->cc_op = CC_OP_LOGICB + ot;
3860
            break;
3861
        case 2: /* not */
3862
            tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
3863
            if (mod != 3) {
3864
                gen_op_st_T0_A0(ot + s->mem_index);
3865
            } else {
3866
                gen_op_mov_reg_T0(ot, rm);
3867
            }
3868
            break;
3869
        case 3: /* neg */
3870
            tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
3871
            if (mod != 3) {
3872
                gen_op_st_T0_A0(ot + s->mem_index);
3873
            } else {
3874
                gen_op_mov_reg_T0(ot, rm);
3875
            }
3876
            gen_op_update_neg_cc();
3877
            s->cc_op = CC_OP_SUBB + ot;
3878
            break;
3879
        case 4: /* mul */
3880
            switch(ot) {
3881
            case OT_BYTE:
3882
                gen_op_mulb_AL_T0();
3883
                s->cc_op = CC_OP_MULB;
3884
                break;
3885
            case OT_WORD:
3886
                gen_op_mulw_AX_T0();
3887
                s->cc_op = CC_OP_MULW;
3888
                break;
3889
            default:
3890
            case OT_LONG:
3891
                gen_op_mull_EAX_T0();
3892
                s->cc_op = CC_OP_MULL;
3893
                break;
3894
#ifdef TARGET_X86_64
3895
            case OT_QUAD:
3896
                gen_op_mulq_EAX_T0();
3897
                s->cc_op = CC_OP_MULQ;
3898
                break;
3899
#endif
3900
            }
3901
            break;
3902
        case 5: /* imul */
3903
            switch(ot) {
3904
            case OT_BYTE:
3905
                gen_op_imulb_AL_T0();
3906
                s->cc_op = CC_OP_MULB;
3907
                break;
3908
            case OT_WORD:
3909
                gen_op_imulw_AX_T0();
3910
                s->cc_op = CC_OP_MULW;
3911
                break;
3912
            default:
3913
            case OT_LONG:
3914
                gen_op_imull_EAX_T0();
3915
                s->cc_op = CC_OP_MULL;
3916
                break;
3917
#ifdef TARGET_X86_64
3918
            case OT_QUAD:
3919
                gen_op_imulq_EAX_T0();
3920
                s->cc_op = CC_OP_MULQ;
3921
                break;
3922
#endif
3923
            }
3924
            break;
3925
        case 6: /* div */
3926
            switch(ot) {
3927
            case OT_BYTE:
3928
                gen_jmp_im(pc_start - s->cs_base);
3929
                tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
3930
                break;
3931
            case OT_WORD:
3932
                gen_jmp_im(pc_start - s->cs_base);
3933
                tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
3934
                break;
3935
            default:
3936
            case OT_LONG:
3937
                gen_jmp_im(pc_start - s->cs_base);
3938
                tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
3939
                break;
3940
#ifdef TARGET_X86_64
3941
            case OT_QUAD:
3942
                gen_jmp_im(pc_start - s->cs_base);
3943
                tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
3944
                break;
3945
#endif
3946
            }
3947
            break;
3948
        case 7: /* idiv */
3949
            switch(ot) {
3950
            case OT_BYTE:
3951
                gen_jmp_im(pc_start - s->cs_base);
3952
                tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
3953
                break;
3954
            case OT_WORD:
3955
                gen_jmp_im(pc_start - s->cs_base);
3956
                tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
3957
                break;
3958
            default:
3959
            case OT_LONG:
3960
                gen_jmp_im(pc_start - s->cs_base);
3961
                tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
3962
                break;
3963
#ifdef TARGET_X86_64
3964
            case OT_QUAD:
3965
                gen_jmp_im(pc_start - s->cs_base);
3966
                tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
3967
                break;
3968
#endif
3969
            }
3970
            break;
3971
        default:
3972
            goto illegal_op;
3973
        }
3974
        break;
3975

    
3976
    case 0xfe: /* GRP4 */
3977
    case 0xff: /* GRP5 */
3978
        if ((b & 1) == 0)
3979
            ot = OT_BYTE;
3980
        else
3981
            ot = dflag + OT_WORD;
3982

    
3983
        modrm = ldub_code(s->pc++);
3984
        mod = (modrm >> 6) & 3;
3985
        rm = (modrm & 7) | REX_B(s);
3986
        op = (modrm >> 3) & 7;
3987
        if (op >= 2 && b == 0xfe) {
3988
            goto illegal_op;
3989
        }
3990
        if (CODE64(s)) {
3991
            if (op == 2 || op == 4) {
3992
                /* operand size for jumps is 64 bit */
3993
                ot = OT_QUAD;
3994
            } else if (op == 3 || op == 5) {
3995
                /* for call calls, the operand is 16 or 32 bit, even
3996
                   in long mode */
3997
                ot = dflag ? OT_LONG : OT_WORD;
3998
            } else if (op == 6) {
3999
                /* default push size is 64 bit */
4000
                ot = dflag ? OT_QUAD : OT_WORD;
4001
            }
4002
        }
4003
        if (mod != 3) {
4004
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4005
            if (op >= 2 && op != 3 && op != 5)
4006
                gen_op_ld_T0_A0(ot + s->mem_index);
4007
        } else {
4008
            gen_op_mov_TN_reg(ot, 0, rm);
4009
        }
4010

    
4011
        switch(op) {
4012
        case 0: /* inc Ev */
4013
            if (mod != 3)
4014
                opreg = OR_TMP0;
4015
            else
4016
                opreg = rm;
4017
            gen_inc(s, ot, opreg, 1);
4018
            break;
4019
        case 1: /* dec Ev */
4020
            if (mod != 3)
4021
                opreg = OR_TMP0;
4022
            else
4023
                opreg = rm;
4024
            gen_inc(s, ot, opreg, -1);
4025
            break;
4026
        case 2: /* call Ev */
4027
            /* XXX: optimize if memory (no 'and' is necessary) */
4028
            if (s->dflag == 0)
4029
                gen_op_andl_T0_ffff();
4030
            next_eip = s->pc - s->cs_base;
4031
            gen_movtl_T1_im(next_eip);
4032
            gen_push_T1(s);
4033
            gen_op_jmp_T0();
4034
            gen_eob(s);
4035
            break;
4036
        case 3: /* lcall Ev */
4037
            gen_op_ld_T1_A0(ot + s->mem_index);
4038
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4039
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4040
        do_lcall:
4041
            if (s->pe && !s->vm86) {
4042
                if (s->cc_op != CC_OP_DYNAMIC)
4043
                    gen_op_set_cc_op(s->cc_op);
4044
                gen_jmp_im(pc_start - s->cs_base);
4045
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4046
                tcg_gen_helper_0_4(helper_lcall_protected,
4047
                                   cpu_tmp2_i32, cpu_T[1],
4048
                                   tcg_const_i32(dflag), 
4049
                                   tcg_const_i32(s->pc - pc_start));
4050
            } else {
4051
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4052
                tcg_gen_helper_0_4(helper_lcall_real,
4053
                                   cpu_tmp2_i32, cpu_T[1],
4054
                                   tcg_const_i32(dflag), 
4055
                                   tcg_const_i32(s->pc - s->cs_base));
4056
            }
4057
            gen_eob(s);
4058
            break;
4059
        case 4: /* jmp Ev */
4060
            if (s->dflag == 0)
4061
                gen_op_andl_T0_ffff();
4062
            gen_op_jmp_T0();
4063
            gen_eob(s);
4064
            break;
4065
        case 5: /* ljmp Ev */
4066
            gen_op_ld_T1_A0(ot + s->mem_index);
4067
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4068
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4069
        do_ljmp:
4070
            if (s->pe && !s->vm86) {
4071
                if (s->cc_op != CC_OP_DYNAMIC)
4072
                    gen_op_set_cc_op(s->cc_op);
4073
                gen_jmp_im(pc_start - s->cs_base);
4074
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4075
                tcg_gen_helper_0_3(helper_ljmp_protected,
4076
                                   cpu_tmp2_i32,
4077
                                   cpu_T[1],
4078
                                   tcg_const_i32(s->pc - pc_start));
4079
            } else {
4080
                gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
4081
                gen_op_movl_T0_T1();
4082
                gen_op_jmp_T0();
4083
            }
4084
            gen_eob(s);
4085
            break;
4086
        case 6: /* push Ev */
4087
            gen_push_T0(s);
4088
            break;
4089
        default:
4090
            goto illegal_op;
4091
        }
4092
        break;
4093

    
4094
    case 0x84: /* test Ev, Gv */
4095
    case 0x85:
4096
        if ((b & 1) == 0)
4097
            ot = OT_BYTE;
4098
        else
4099
            ot = dflag + OT_WORD;
4100

    
4101
        modrm = ldub_code(s->pc++);
4102
        mod = (modrm >> 6) & 3;
4103
        rm = (modrm & 7) | REX_B(s);
4104
        reg = ((modrm >> 3) & 7) | rex_r;
4105

    
4106
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4107
        gen_op_mov_TN_reg(ot, 1, reg);
4108
        gen_op_testl_T0_T1_cc();
4109
        s->cc_op = CC_OP_LOGICB + ot;
4110
        break;
4111

    
4112
    case 0xa8: /* test eAX, Iv */
4113
    case 0xa9:
4114
        if ((b & 1) == 0)
4115
            ot = OT_BYTE;
4116
        else
4117
            ot = dflag + OT_WORD;
4118
        val = insn_get(s, ot);
4119

    
4120
        gen_op_mov_TN_reg(ot, 0, OR_EAX);
4121
        gen_op_movl_T1_im(val);
4122
        gen_op_testl_T0_T1_cc();
4123
        s->cc_op = CC_OP_LOGICB + ot;
4124
        break;
4125

    
4126
    case 0x98: /* CWDE/CBW */
4127
#ifdef TARGET_X86_64
4128
        if (dflag == 2) {
4129
            gen_op_movslq_RAX_EAX();
4130
        } else
4131
#endif
4132
        if (dflag == 1)
4133
            gen_op_movswl_EAX_AX();
4134
        else
4135
            gen_op_movsbw_AX_AL();
4136
        break;
4137
    case 0x99: /* CDQ/CWD */
4138
#ifdef TARGET_X86_64
4139
        if (dflag == 2) {
4140
            gen_op_movsqo_RDX_RAX();
4141
        } else
4142
#endif
4143
        if (dflag == 1)
4144
            gen_op_movslq_EDX_EAX();
4145
        else
4146
            gen_op_movswl_DX_AX();
4147
        break;
4148
    case 0x1af: /* imul Gv, Ev */
4149
    case 0x69: /* imul Gv, Ev, I */
4150
    case 0x6b:
4151
        ot = dflag + OT_WORD;
4152
        modrm = ldub_code(s->pc++);
4153
        reg = ((modrm >> 3) & 7) | rex_r;
4154
        if (b == 0x69)
4155
            s->rip_offset = insn_const_size(ot);
4156
        else if (b == 0x6b)
4157
            s->rip_offset = 1;
4158
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4159
        if (b == 0x69) {
4160
            val = insn_get(s, ot);
4161
            gen_op_movl_T1_im(val);
4162
        } else if (b == 0x6b) {
4163
            val = (int8_t)insn_get(s, OT_BYTE);
4164
            gen_op_movl_T1_im(val);
4165
        } else {
4166
            gen_op_mov_TN_reg(ot, 1, reg);
4167
        }
4168

    
4169
#ifdef TARGET_X86_64
4170
        if (ot == OT_QUAD) {
4171
            gen_op_imulq_T0_T1();
4172
        } else
4173
#endif
4174
        if (ot == OT_LONG) {
4175
            gen_op_imull_T0_T1();
4176
        } else {
4177
            gen_op_imulw_T0_T1();
4178
        }
4179
        gen_op_mov_reg_T0(ot, reg);
4180
        s->cc_op = CC_OP_MULB + ot;
4181
        break;
4182
    case 0x1c0:
4183
    case 0x1c1: /* xadd Ev, Gv */
4184
        if ((b & 1) == 0)
4185
            ot = OT_BYTE;
4186
        else
4187
            ot = dflag + OT_WORD;
4188
        modrm = ldub_code(s->pc++);
4189
        reg = ((modrm >> 3) & 7) | rex_r;
4190
        mod = (modrm >> 6) & 3;
4191
        if (mod == 3) {
4192
            rm = (modrm & 7) | REX_B(s);
4193
            gen_op_mov_TN_reg(ot, 0, reg);
4194
            gen_op_mov_TN_reg(ot, 1, rm);
4195
            gen_op_addl_T0_T1();
4196
            gen_op_mov_reg_T1(ot, reg);
4197
            gen_op_mov_reg_T0(ot, rm);
4198
        } else {
4199
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4200
            gen_op_mov_TN_reg(ot, 0, reg);
4201
            gen_op_ld_T1_A0(ot + s->mem_index);
4202
            gen_op_addl_T0_T1();
4203
            gen_op_st_T0_A0(ot + s->mem_index);
4204
            gen_op_mov_reg_T1(ot, reg);
4205
        }
4206
        gen_op_update2_cc();
4207
        s->cc_op = CC_OP_ADDB + ot;
4208
        break;
4209
    case 0x1b0:
4210
    case 0x1b1: /* cmpxchg Ev, Gv */
4211
        {
4212
            int label1;
4213

    
4214
            if ((b & 1) == 0)
4215
                ot = OT_BYTE;
4216
            else
4217
                ot = dflag + OT_WORD;
4218
            modrm = ldub_code(s->pc++);
4219
            reg = ((modrm >> 3) & 7) | rex_r;
4220
            mod = (modrm >> 6) & 3;
4221
            gen_op_mov_TN_reg(ot, 1, reg);
4222
            if (mod == 3) {
4223
                rm = (modrm & 7) | REX_B(s);
4224
                gen_op_mov_TN_reg(ot, 0, rm);
4225
            } else {
4226
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4227
                gen_op_ld_T0_A0(ot + s->mem_index);
4228
                rm = 0; /* avoid warning */
4229
            }
4230
            label1 = gen_new_label();
4231
            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_EAX]));
4232
            tcg_gen_sub_tl(cpu_T3, cpu_T3, cpu_T[0]);
4233
            gen_extu(ot, cpu_T3);
4234
            tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);
4235
            tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
4236
            gen_op_mov_reg_T0(ot, R_EAX);
4237
            gen_set_label(label1);
4238
            if (mod == 3) {
4239
                gen_op_mov_reg_T1(ot, rm);
4240
            } else {
4241
                gen_op_st_T1_A0(ot + s->mem_index);
4242
            }
4243
            tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4244
            tcg_gen_mov_tl(cpu_cc_dst, cpu_T3);
4245
            s->cc_op = CC_OP_SUBB + ot;
4246
        }
4247
        break;
4248
    case 0x1c7: /* cmpxchg8b */
4249
        modrm = ldub_code(s->pc++);
4250
        mod = (modrm >> 6) & 3;
4251
        if ((mod == 3) || ((modrm & 0x38) != 0x8))
4252
            goto illegal_op;
4253
        gen_jmp_im(pc_start - s->cs_base);
4254
        if (s->cc_op != CC_OP_DYNAMIC)
4255
            gen_op_set_cc_op(s->cc_op);
4256
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4257
        gen_op_cmpxchg8b();
4258
        s->cc_op = CC_OP_EFLAGS;
4259
        break;
4260

    
4261
        /**************************/
4262
        /* push/pop */
4263
    case 0x50 ... 0x57: /* push */
4264
        gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
4265
        gen_push_T0(s);
4266
        break;
4267
    case 0x58 ... 0x5f: /* pop */
4268
        if (CODE64(s)) {
4269
            ot = dflag ? OT_QUAD : OT_WORD;
4270
        } else {
4271
            ot = dflag + OT_WORD;
4272
        }
4273
        gen_pop_T0(s);
4274
        /* NOTE: order is important for pop %sp */
4275
        gen_pop_update(s);
4276
        gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
4277
        break;
4278
    case 0x60: /* pusha */
4279
        if (CODE64(s))
4280
            goto illegal_op;
4281
        gen_pusha(s);
4282
        break;
4283
    case 0x61: /* popa */
4284
        if (CODE64(s))
4285
            goto illegal_op;
4286
        gen_popa(s);
4287
        break;
4288
    case 0x68: /* push Iv */
4289
    case 0x6a:
4290
        if (CODE64(s)) {
4291
            ot = dflag ? OT_QUAD : OT_WORD;
4292
        } else {
4293
            ot = dflag + OT_WORD;
4294
        }
4295
        if (b == 0x68)
4296
            val = insn_get(s, ot);
4297
        else
4298
            val = (int8_t)insn_get(s, OT_BYTE);
4299
        gen_op_movl_T0_im(val);
4300
        gen_push_T0(s);
4301
        break;
4302
    case 0x8f: /* pop Ev */
4303
        if (CODE64(s)) {
4304
            ot = dflag ? OT_QUAD : OT_WORD;
4305
        } else {
4306
            ot = dflag + OT_WORD;
4307
        }
4308
        modrm = ldub_code(s->pc++);
4309
        mod = (modrm >> 6) & 3;
4310
        gen_pop_T0(s);
4311
        if (mod == 3) {
4312
            /* NOTE: order is important for pop %sp */
4313
            gen_pop_update(s);
4314
            rm = (modrm & 7) | REX_B(s);
4315
            gen_op_mov_reg_T0(ot, rm);
4316
        } else {
4317
            /* NOTE: order is important too for MMU exceptions */
4318
            s->popl_esp_hack = 1 << ot;
4319
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4320
            s->popl_esp_hack = 0;
4321
            gen_pop_update(s);
4322
        }
4323
        break;
4324
    case 0xc8: /* enter */
4325
        {
4326
            int level;
4327
            val = lduw_code(s->pc);
4328
            s->pc += 2;
4329
            level = ldub_code(s->pc++);
4330
            gen_enter(s, val, level);
4331
        }
4332
        break;
4333
    case 0xc9: /* leave */
4334
        /* XXX: exception not precise (ESP is updated before potential exception) */
4335
        if (CODE64(s)) {
4336
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4337
            gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4338
        } else if (s->ss32) {
4339
            gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4340
            gen_op_mov_reg_T0(OT_LONG, R_ESP);
4341
        } else {
4342
            gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4343
            gen_op_mov_reg_T0(OT_WORD, R_ESP);
4344
        }
4345
        gen_pop_T0(s);
4346
        if (CODE64(s)) {
4347
            ot = dflag ? OT_QUAD : OT_WORD;
4348
        } else {
4349
            ot = dflag + OT_WORD;
4350
        }
4351
        gen_op_mov_reg_T0(ot, R_EBP);
4352
        gen_pop_update(s);
4353
        break;
4354
    case 0x06: /* push es */
4355
    case 0x0e: /* push cs */
4356
    case 0x16: /* push ss */
4357
    case 0x1e: /* push ds */
4358
        if (CODE64(s))
4359
            goto illegal_op;
4360
        gen_op_movl_T0_seg(b >> 3);
4361
        gen_push_T0(s);
4362
        break;
4363
    case 0x1a0: /* push fs */
4364
    case 0x1a8: /* push gs */
4365
        gen_op_movl_T0_seg((b >> 3) & 7);
4366
        gen_push_T0(s);
4367
        break;
4368
    case 0x07: /* pop es */
4369
    case 0x17: /* pop ss */
4370
    case 0x1f: /* pop ds */
4371
        if (CODE64(s))
4372
            goto illegal_op;
4373
        reg = b >> 3;
4374
        gen_pop_T0(s);
4375
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4376
        gen_pop_update(s);
4377
        if (reg == R_SS) {
4378
            /* if reg == SS, inhibit interrupts/trace. */
4379
            /* If several instructions disable interrupts, only the
4380
               _first_ does it */
4381
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4382
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
4383
            s->tf = 0;
4384
        }
4385
        if (s->is_jmp) {
4386
            gen_jmp_im(s->pc - s->cs_base);
4387
            gen_eob(s);
4388
        }
4389
        break;
4390
    case 0x1a1: /* pop fs */
4391
    case 0x1a9: /* pop gs */
4392
        gen_pop_T0(s);
4393
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4394
        gen_pop_update(s);
4395
        if (s->is_jmp) {
4396
            gen_jmp_im(s->pc - s->cs_base);
4397
            gen_eob(s);
4398
        }
4399
        break;
4400

    
4401
        /**************************/
4402
        /* mov */
4403
    case 0x88:
4404
    case 0x89: /* mov Gv, Ev */
4405
        if ((b & 1) == 0)
4406
            ot = OT_BYTE;
4407
        else
4408
            ot = dflag + OT_WORD;
4409
        modrm = ldub_code(s->pc++);
4410
        reg = ((modrm >> 3) & 7) | rex_r;
4411

    
4412
        /* generate a generic store */
4413
        gen_ldst_modrm(s, modrm, ot, reg, 1);
4414
        break;
4415
    case 0xc6:
4416
    case 0xc7: /* mov Ev, Iv */
4417
        if ((b & 1) == 0)
4418
            ot = OT_BYTE;
4419
        else
4420
            ot = dflag + OT_WORD;
4421
        modrm = ldub_code(s->pc++);
4422
        mod = (modrm >> 6) & 3;
4423
        if (mod != 3) {
4424
            s->rip_offset = insn_const_size(ot);
4425
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4426
        }
4427
        val = insn_get(s, ot);
4428
        gen_op_movl_T0_im(val);
4429
        if (mod != 3)
4430
            gen_op_st_T0_A0(ot + s->mem_index);
4431
        else
4432
            gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4433
        break;
4434
    case 0x8a:
4435
    case 0x8b: /* mov Ev, Gv */
4436
        if ((b & 1) == 0)
4437
            ot = OT_BYTE;
4438
        else
4439
            ot = OT_WORD + dflag;
4440
        modrm = ldub_code(s->pc++);
4441
        reg = ((modrm >> 3) & 7) | rex_r;
4442

    
4443
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4444
        gen_op_mov_reg_T0(ot, reg);
4445
        break;
4446
    case 0x8e: /* mov seg, Gv */
4447
        modrm = ldub_code(s->pc++);
4448
        reg = (modrm >> 3) & 7;
4449
        if (reg >= 6 || reg == R_CS)
4450
            goto illegal_op;
4451
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4452
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4453
        if (reg == R_SS) {
4454
            /* if reg == SS, inhibit interrupts/trace */
4455
            /* If several instructions disable interrupts, only the
4456
               _first_ does it */
4457
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4458
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
4459
            s->tf = 0;
4460
        }
4461
        if (s->is_jmp) {
4462
            gen_jmp_im(s->pc - s->cs_base);
4463
            gen_eob(s);
4464
        }
4465
        break;
4466
    case 0x8c: /* mov Gv, seg */
4467
        modrm = ldub_code(s->pc++);
4468
        reg = (modrm >> 3) & 7;
4469
        mod = (modrm >> 6) & 3;
4470
        if (reg >= 6)
4471
            goto illegal_op;
4472
        gen_op_movl_T0_seg(reg);
4473
        if (mod == 3)
4474
            ot = OT_WORD + dflag;
4475
        else
4476
            ot = OT_WORD;
4477
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4478
        break;
4479

    
4480
    case 0x1b6: /* movzbS Gv, Eb */
4481
    case 0x1b7: /* movzwS Gv, Eb */
4482
    case 0x1be: /* movsbS Gv, Eb */
4483
    case 0x1bf: /* movswS Gv, Eb */
4484
        {
4485
            int d_ot;
4486
            /* d_ot is the size of destination */
4487
            d_ot = dflag + OT_WORD;
4488
            /* ot is the size of source */
4489
            ot = (b & 1) + OT_BYTE;
4490
            modrm = ldub_code(s->pc++);
4491
            reg = ((modrm >> 3) & 7) | rex_r;
4492
            mod = (modrm >> 6) & 3;
4493
            rm = (modrm & 7) | REX_B(s);
4494

    
4495
            if (mod == 3) {
4496
                gen_op_mov_TN_reg(ot, 0, rm);
4497
                switch(ot | (b & 8)) {
4498
                case OT_BYTE:
4499
                    gen_op_movzbl_T0_T0();
4500
                    break;
4501
                case OT_BYTE | 8:
4502
                    gen_op_movsbl_T0_T0();
4503
                    break;
4504
                case OT_WORD:
4505
                    gen_op_movzwl_T0_T0();
4506
                    break;
4507
                default:
4508
                case OT_WORD | 8:
4509
                    gen_op_movswl_T0_T0();
4510
                    break;
4511
                }
4512
                gen_op_mov_reg_T0(d_ot, reg);
4513
            } else {
4514
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4515
                if (b & 8) {
4516
                    gen_op_lds_T0_A0(ot + s->mem_index);
4517
                } else {
4518
                    gen_op_ldu_T0_A0(ot + s->mem_index);
4519
                }
4520
                gen_op_mov_reg_T0(d_ot, reg);
4521
            }
4522
        }
4523
        break;
4524

    
4525
    case 0x8d: /* lea */
4526
        ot = dflag + OT_WORD;
4527
        modrm = ldub_code(s->pc++);
4528
        mod = (modrm >> 6) & 3;
4529
        if (mod == 3)
4530
            goto illegal_op;
4531
        reg = ((modrm >> 3) & 7) | rex_r;
4532
        /* we must ensure that no segment is added */
4533
        s->override = -1;
4534
        val = s->addseg;
4535
        s->addseg = 0;
4536
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4537
        s->addseg = val;
4538
        gen_op_mov_reg_A0(ot - OT_WORD, reg);
4539
        break;
4540

    
4541
    case 0xa0: /* mov EAX, Ov */
4542
    case 0xa1:
4543
    case 0xa2: /* mov Ov, EAX */
4544
    case 0xa3:
4545
        {
4546
            target_ulong offset_addr;
4547

    
4548
            if ((b & 1) == 0)
4549
                ot = OT_BYTE;
4550
            else
4551
                ot = dflag + OT_WORD;
4552
#ifdef TARGET_X86_64
4553
            if (s->aflag == 2) {
4554
                offset_addr = ldq_code(s->pc);
4555
                s->pc += 8;
4556
                gen_op_movq_A0_im(offset_addr);
4557
            } else
4558
#endif
4559
            {
4560
                if (s->aflag) {
4561
                    offset_addr = insn_get(s, OT_LONG);
4562
                } else {
4563
                    offset_addr = insn_get(s, OT_WORD);
4564
                }
4565
                gen_op_movl_A0_im(offset_addr);
4566
            }
4567
            gen_add_A0_ds_seg(s);
4568
            if ((b & 2) == 0) {
4569
                gen_op_ld_T0_A0(ot + s->mem_index);
4570
                gen_op_mov_reg_T0(ot, R_EAX);
4571
            } else {
4572
                gen_op_mov_TN_reg(ot, 0, R_EAX);
4573
                gen_op_st_T0_A0(ot + s->mem_index);
4574
            }
4575
        }
4576
        break;
4577
    case 0xd7: /* xlat */
4578
#ifdef TARGET_X86_64
4579
        if (s->aflag == 2) {
4580
            gen_op_movq_A0_reg(R_EBX);
4581
            gen_op_addq_A0_AL();
4582
        } else
4583
#endif
4584
        {
4585
            gen_op_movl_A0_reg(R_EBX);
4586
            gen_op_addl_A0_AL();
4587
            if (s->aflag == 0)
4588
                gen_op_andl_A0_ffff();
4589
        }
4590
        gen_add_A0_ds_seg(s);
4591
        gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4592
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4593
        break;
4594
    case 0xb0 ... 0xb7: /* mov R, Ib */
4595
        val = insn_get(s, OT_BYTE);
4596
        gen_op_movl_T0_im(val);
4597
        gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4598
        break;
4599
    case 0xb8 ... 0xbf: /* mov R, Iv */
4600
#ifdef TARGET_X86_64
4601
        if (dflag == 2) {
4602
            uint64_t tmp;
4603
            /* 64 bit case */
4604
            tmp = ldq_code(s->pc);
4605
            s->pc += 8;
4606
            reg = (b & 7) | REX_B(s);
4607
            gen_movtl_T0_im(tmp);
4608
            gen_op_mov_reg_T0(OT_QUAD, reg);
4609
        } else
4610
#endif
4611
        {
4612
            ot = dflag ? OT_LONG : OT_WORD;
4613
            val = insn_get(s, ot);
4614
            reg = (b & 7) | REX_B(s);
4615
            gen_op_movl_T0_im(val);
4616
            gen_op_mov_reg_T0(ot, reg);
4617
        }
4618
        break;
4619

    
4620
    case 0x91 ... 0x97: /* xchg R, EAX */
4621
        ot = dflag + OT_WORD;
4622
        reg = (b & 7) | REX_B(s);
4623
        rm = R_EAX;
4624
        goto do_xchg_reg;
4625
    case 0x86:
4626
    case 0x87: /* xchg Ev, Gv */
4627
        if ((b & 1) == 0)
4628
            ot = OT_BYTE;
4629
        else
4630
            ot = dflag + OT_WORD;
4631
        modrm = ldub_code(s->pc++);
4632
        reg = ((modrm >> 3) & 7) | rex_r;
4633
        mod = (modrm >> 6) & 3;
4634
        if (mod == 3) {
4635
            rm = (modrm & 7) | REX_B(s);
4636
        do_xchg_reg:
4637
            gen_op_mov_TN_reg(ot, 0, reg);
4638
            gen_op_mov_TN_reg(ot, 1, rm);
4639
            gen_op_mov_reg_T0(ot, rm);
4640
            gen_op_mov_reg_T1(ot, reg);
4641
        } else {
4642
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4643
            gen_op_mov_TN_reg(ot, 0, reg);
4644
            /* for xchg, lock is implicit */
4645
            if (!(prefixes & PREFIX_LOCK))
4646
                tcg_gen_helper_0_0(helper_lock);
4647
            gen_op_ld_T1_A0(ot + s->mem_index);
4648
            gen_op_st_T0_A0(ot + s->mem_index);
4649
            if (!(prefixes & PREFIX_LOCK))
4650
                tcg_gen_helper_0_0(helper_unlock);
4651
            gen_op_mov_reg_T1(ot, reg);
4652
        }
4653
        break;
4654
    case 0xc4: /* les Gv */
4655
        if (CODE64(s))
4656
            goto illegal_op;
4657
        op = R_ES;
4658
        goto do_lxx;
4659
    case 0xc5: /* lds Gv */
4660
        if (CODE64(s))
4661
            goto illegal_op;
4662
        op = R_DS;
4663
        goto do_lxx;
4664
    case 0x1b2: /* lss Gv */
4665
        op = R_SS;
4666
        goto do_lxx;
4667
    case 0x1b4: /* lfs Gv */
4668
        op = R_FS;
4669
        goto do_lxx;
4670
    case 0x1b5: /* lgs Gv */
4671
        op = R_GS;
4672
    do_lxx:
4673
        ot = dflag ? OT_LONG : OT_WORD;
4674
        modrm = ldub_code(s->pc++);
4675
        reg = ((modrm >> 3) & 7) | rex_r;
4676
        mod = (modrm >> 6) & 3;
4677
        if (mod == 3)
4678
            goto illegal_op;
4679
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4680
        gen_op_ld_T1_A0(ot + s->mem_index);
4681
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4682
        /* load the segment first to handle exceptions properly */
4683
        gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4684
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4685
        /* then put the data */
4686
        gen_op_mov_reg_T1(ot, reg);
4687
        if (s->is_jmp) {
4688
            gen_jmp_im(s->pc - s->cs_base);
4689
            gen_eob(s);
4690
        }
4691
        break;
4692

    
4693
        /************************/
4694
        /* shifts */
4695
    case 0xc0:
4696
    case 0xc1:
4697
        /* shift Ev,Ib */
4698
        shift = 2;
4699
    grp2:
4700
        {
4701
            if ((b & 1) == 0)
4702
                ot = OT_BYTE;
4703
            else
4704
                ot = dflag + OT_WORD;
4705

    
4706
            modrm = ldub_code(s->pc++);
4707
            mod = (modrm >> 6) & 3;
4708
            op = (modrm >> 3) & 7;
4709

    
4710
            if (mod != 3) {
4711
                if (shift == 2) {
4712
                    s->rip_offset = 1;
4713
                }
4714
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4715
                opreg = OR_TMP0;
4716
            } else {
4717
                opreg = (modrm & 7) | REX_B(s);
4718
            }
4719

    
4720
            /* simpler op */
4721
            if (shift == 0) {
4722
                gen_shift(s, op, ot, opreg, OR_ECX);
4723
            } else {
4724
                if (shift == 2) {
4725
                    shift = ldub_code(s->pc++);
4726
                }
4727
                gen_shifti(s, op, ot, opreg, shift);
4728
            }
4729
        }
4730
        break;
4731
    case 0xd0:
4732
    case 0xd1:
4733
        /* shift Ev,1 */
4734
        shift = 1;
4735
        goto grp2;
4736
    case 0xd2:
4737
    case 0xd3:
4738
        /* shift Ev,cl */
4739
        shift = 0;
4740
        goto grp2;
4741

    
4742
    case 0x1a4: /* shld imm */
4743
        op = 0;
4744
        shift = 1;
4745
        goto do_shiftd;
4746
    case 0x1a5: /* shld cl */
4747
        op = 0;
4748
        shift = 0;
4749
        goto do_shiftd;
4750
    case 0x1ac: /* shrd imm */
4751
        op = 1;
4752
        shift = 1;
4753
        goto do_shiftd;
4754
    case 0x1ad: /* shrd cl */
4755
        op = 1;
4756
        shift = 0;
4757
    do_shiftd:
4758
        ot = dflag + OT_WORD;
4759
        modrm = ldub_code(s->pc++);
4760
        mod = (modrm >> 6) & 3;
4761
        rm = (modrm & 7) | REX_B(s);
4762
        reg = ((modrm >> 3) & 7) | rex_r;
4763
        if (mod != 3) {
4764
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4765
            opreg = OR_TMP0;
4766
        } else {
4767
            opreg = rm;
4768
        }
4769
        gen_op_mov_TN_reg(ot, 1, reg);
4770

    
4771
        if (shift) {
4772
            val = ldub_code(s->pc++);
4773
            tcg_gen_movi_tl(cpu_T3, val);
4774
        } else {
4775
            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
4776
        }
4777
        gen_shiftd_rm_T1_T3(s, ot, opreg, op);
4778
        break;
4779

    
4780
        /************************/
4781
        /* floats */
4782
    case 0xd8 ... 0xdf:
4783
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4784
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4785
            /* XXX: what to do if illegal op ? */
4786
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4787
            break;
4788
        }
4789
        modrm = ldub_code(s->pc++);
4790
        mod = (modrm >> 6) & 3;
4791
        rm = modrm & 7;
4792
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4793
        if (mod != 3) {
4794
            /* memory op */
4795
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4796
            switch(op) {
4797
            case 0x00 ... 0x07: /* fxxxs */
4798
            case 0x10 ... 0x17: /* fixxxl */
4799
            case 0x20 ... 0x27: /* fxxxl */
4800
            case 0x30 ... 0x37: /* fixxx */
4801
                {
4802
                    int op1;
4803
                    op1 = op & 7;
4804

    
4805
                    switch(op >> 4) {
4806
                    case 0:
4807
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4808
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4809
                        tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
4810
                        break;
4811
                    case 1:
4812
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4813
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4814
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4815
                        break;
4816
                    case 2:
4817
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
4818
                                          (s->mem_index >> 2) - 1);
4819
                        tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
4820
                        break;
4821
                    case 3:
4822
                    default:
4823
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4824
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4825
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4826
                        break;
4827
                    }
4828

    
4829
                    tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
4830
                    if (op1 == 3) {
4831
                        /* fcomp needs pop */
4832
                        tcg_gen_helper_0_0(helper_fpop);
4833
                    }
4834
                }
4835
                break;
4836
            case 0x08: /* flds */
4837
            case 0x0a: /* fsts */
4838
            case 0x0b: /* fstps */
4839
            case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4840
            case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4841
            case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4842
                switch(op & 7) {
4843
                case 0:
4844
                    switch(op >> 4) {
4845
                    case 0:
4846
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4847
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4848
                        tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
4849
                        break;
4850
                    case 1:
4851
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4852
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4853
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
4854
                        break;
4855
                    case 2:
4856
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
4857
                                          (s->mem_index >> 2) - 1);
4858
                        tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
4859
                        break;
4860
                    case 3:
4861
                    default:
4862
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4863
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4864
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
4865
                        break;
4866
                    }
4867
                    break;
4868
                case 1:
4869
                    /* XXX: the corresponding CPUID bit must be tested ! */
4870
                    switch(op >> 4) {
4871
                    case 1:
4872
                        tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
4873
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4874
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4875
                        break;
4876
                    case 2:
4877
                        tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
4878
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
4879
                                          (s->mem_index >> 2) - 1);
4880
                        break;
4881
                    case 3:
4882
                    default:
4883
                        tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
4884
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4885
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
4886
                        break;
4887
                    }
4888
                    tcg_gen_helper_0_0(helper_fpop);
4889
                    break;
4890
                default:
4891
                    switch(op >> 4) {
4892
                    case 0:
4893
                        tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
4894
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4895
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4896
                        break;
4897
                    case 1:
4898
                        tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
4899
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4900
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4901
                        break;
4902
                    case 2:
4903
                        tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
4904
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
4905
                                          (s->mem_index >> 2) - 1);
4906
                        break;
4907
                    case 3:
4908
                    default:
4909
                        tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
4910
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4911
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
4912
                        break;
4913
                    }
4914
                    if ((op & 7) == 3)
4915
                        tcg_gen_helper_0_0(helper_fpop);
4916
                    break;
4917
                }
4918
                break;
4919
            case 0x0c: /* fldenv mem */
4920
                if (s->cc_op != CC_OP_DYNAMIC)
4921
                    gen_op_set_cc_op(s->cc_op);
4922
                gen_jmp_im(pc_start - s->cs_base);
4923
                tcg_gen_helper_0_2(helper_fldenv, 
4924
                                   cpu_A0, tcg_const_i32(s->dflag));
4925
                break;
4926
            case 0x0d: /* fldcw mem */
4927
                gen_op_ld_T0_A0(OT_WORD + s->mem_index);
4928
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4929
                tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
4930
                break;
4931
            case 0x0e: /* fnstenv mem */
4932
                if (s->cc_op != CC_OP_DYNAMIC)
4933
                    gen_op_set_cc_op(s->cc_op);
4934
                gen_jmp_im(pc_start - s->cs_base);
4935
                tcg_gen_helper_0_2(helper_fstenv,
4936
                                   cpu_A0, tcg_const_i32(s->dflag));
4937
                break;
4938
            case 0x0f: /* fnstcw mem */
4939
                tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
4940
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4941
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
4942
                break;
4943
            case 0x1d: /* fldt mem */
4944
                if (s->cc_op != CC_OP_DYNAMIC)
4945
                    gen_op_set_cc_op(s->cc_op);
4946
                gen_jmp_im(pc_start - s->cs_base);
4947
                tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
4948
                break;
4949
            case 0x1f: /* fstpt mem */
4950
                if (s->cc_op != CC_OP_DYNAMIC)
4951
                    gen_op_set_cc_op(s->cc_op);
4952
                gen_jmp_im(pc_start - s->cs_base);
4953
                tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
4954
                tcg_gen_helper_0_0(helper_fpop);
4955
                break;
4956
            case 0x2c: /* frstor mem */
4957
                if (s->cc_op != CC_OP_DYNAMIC)
4958
                    gen_op_set_cc_op(s->cc_op);
4959
                gen_jmp_im(pc_start - s->cs_base);
4960
                tcg_gen_helper_0_2(helper_frstor,
4961
                                   cpu_A0, tcg_const_i32(s->dflag));
4962
                break;
4963
            case 0x2e: /* fnsave mem */
4964
                if (s->cc_op != CC_OP_DYNAMIC)
4965
                    gen_op_set_cc_op(s->cc_op);
4966
                gen_jmp_im(pc_start - s->cs_base);
4967
                tcg_gen_helper_0_2(helper_fsave,
4968
                                   cpu_A0, tcg_const_i32(s->dflag));
4969
                break;
4970
            case 0x2f: /* fnstsw mem */
4971
                tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
4972
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4973
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
4974
                break;
4975
            case 0x3c: /* fbld */
4976
                if (s->cc_op != CC_OP_DYNAMIC)
4977
                    gen_op_set_cc_op(s->cc_op);
4978
                gen_jmp_im(pc_start - s->cs_base);
4979
                tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
4980
                break;
4981
            case 0x3e: /* fbstp */
4982
                if (s->cc_op != CC_OP_DYNAMIC)
4983
                    gen_op_set_cc_op(s->cc_op);
4984
                gen_jmp_im(pc_start - s->cs_base);
4985
                tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
4986
                tcg_gen_helper_0_0(helper_fpop);
4987
                break;
4988
            case 0x3d: /* fildll */
4989
                tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
4990
                                  (s->mem_index >> 2) - 1);
4991
                tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
4992
                break;
4993
            case 0x3f: /* fistpll */
4994
                tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
4995
                tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
4996
                                  (s->mem_index >> 2) - 1);
4997
                tcg_gen_helper_0_0(helper_fpop);
4998
                break;
4999
            default:
5000
                goto illegal_op;
5001
            }
5002
        } else {
5003
            /* register float ops */
5004
            opreg = rm;
5005

    
5006
            switch(op) {
5007
            case 0x08: /* fld sti */
5008
                tcg_gen_helper_0_0(helper_fpush);
5009
                tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
5010
                break;
5011
            case 0x09: /* fxchg sti */
5012
            case 0x29: /* fxchg4 sti, undocumented op */
5013
            case 0x39: /* fxchg7 sti, undocumented op */
5014
                tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
5015
                break;
5016
            case 0x0a: /* grp d9/2 */
5017
                switch(rm) {
5018
                case 0: /* fnop */
5019
                    /* check exceptions (FreeBSD FPU probe) */
5020
                    if (s->cc_op != CC_OP_DYNAMIC)
5021
                        gen_op_set_cc_op(s->cc_op);
5022
                    gen_jmp_im(pc_start - s->cs_base);
5023
                    tcg_gen_helper_0_0(helper_fwait);
5024
                    break;
5025
                default:
5026
                    goto illegal_op;
5027
                }
5028
                break;
5029
            case 0x0c: /* grp d9/4 */
5030
                switch(rm) {
5031
                case 0: /* fchs */
5032
                    tcg_gen_helper_0_0(helper_fchs_ST0);
5033
                    break;
5034
                case 1: /* fabs */
5035
                    tcg_gen_helper_0_0(helper_fabs_ST0);
5036
                    break;
5037
                case 4: /* ftst */
5038
                    tcg_gen_helper_0_0(helper_fldz_FT0);
5039
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5040
                    break;
5041
                case 5: /* fxam */
5042
                    tcg_gen_helper_0_0(helper_fxam_ST0);
5043
                    break;
5044
                default:
5045
                    goto illegal_op;
5046
                }
5047
                break;
5048
            case 0x0d: /* grp d9/5 */
5049
                {
5050
                    switch(rm) {
5051
                    case 0:
5052
                        tcg_gen_helper_0_0(helper_fpush);
5053
                        tcg_gen_helper_0_0(helper_fld1_ST0);
5054
                        break;
5055
                    case 1:
5056
                        tcg_gen_helper_0_0(helper_fpush);
5057
                        tcg_gen_helper_0_0(helper_fldl2t_ST0);
5058
                        break;
5059
                    case 2:
5060
                        tcg_gen_helper_0_0(helper_fpush);
5061
                        tcg_gen_helper_0_0(helper_fldl2e_ST0);
5062
                        break;
5063
                    case 3:
5064
                        tcg_gen_helper_0_0(helper_fpush);
5065
                        tcg_gen_helper_0_0(helper_fldpi_ST0);
5066
                        break;
5067
                    case 4:
5068
                        tcg_gen_helper_0_0(helper_fpush);
5069
                        tcg_gen_helper_0_0(helper_fldlg2_ST0);
5070
                        break;
5071
                    case 5:
5072
                        tcg_gen_helper_0_0(helper_fpush);
5073
                        tcg_gen_helper_0_0(helper_fldln2_ST0);
5074
                        break;
5075
                    case 6:
5076
                        tcg_gen_helper_0_0(helper_fpush);
5077
                        tcg_gen_helper_0_0(helper_fldz_ST0);
5078
                        break;
5079
                    default:
5080
                        goto illegal_op;
5081
                    }
5082
                }
5083
                break;
5084
            case 0x0e: /* grp d9/6 */
5085
                switch(rm) {
5086
                case 0: /* f2xm1 */
5087
                    tcg_gen_helper_0_0(helper_f2xm1);
5088
                    break;
5089
                case 1: /* fyl2x */
5090
                    tcg_gen_helper_0_0(helper_fyl2x);
5091
                    break;
5092
                case 2: /* fptan */
5093
                    tcg_gen_helper_0_0(helper_fptan);
5094
                    break;
5095
                case 3: /* fpatan */
5096
                    tcg_gen_helper_0_0(helper_fpatan);
5097
                    break;
5098
                case 4: /* fxtract */
5099
                    tcg_gen_helper_0_0(helper_fxtract);
5100
                    break;
5101
                case 5: /* fprem1 */
5102
                    tcg_gen_helper_0_0(helper_fprem1);
5103
                    break;
5104
                case 6: /* fdecstp */
5105
                    tcg_gen_helper_0_0(helper_fdecstp);
5106
                    break;
5107
                default:
5108
                case 7: /* fincstp */
5109
                    tcg_gen_helper_0_0(helper_fincstp);
5110
                    break;
5111
                }
5112
                break;
5113
            case 0x0f: /* grp d9/7 */
5114
                switch(rm) {
5115
                case 0: /* fprem */
5116
                    tcg_gen_helper_0_0(helper_fprem);
5117
                    break;
5118
                case 1: /* fyl2xp1 */
5119
                    tcg_gen_helper_0_0(helper_fyl2xp1);
5120
                    break;
5121
                case 2: /* fsqrt */
5122
                    tcg_gen_helper_0_0(helper_fsqrt);
5123
                    break;
5124
                case 3: /* fsincos */
5125
                    tcg_gen_helper_0_0(helper_fsincos);
5126
                    break;
5127
                case 5: /* fscale */
5128
                    tcg_gen_helper_0_0(helper_fscale);
5129
                    break;
5130
                case 4: /* frndint */
5131
                    tcg_gen_helper_0_0(helper_frndint);
5132
                    break;
5133
                case 6: /* fsin */
5134
                    tcg_gen_helper_0_0(helper_fsin);
5135
                    break;
5136
                default:
5137
                case 7: /* fcos */
5138
                    tcg_gen_helper_0_0(helper_fcos);
5139
                    break;
5140
                }
5141
                break;
5142
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
5143
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
5144
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
5145
                {
5146
                    int op1;
5147

    
5148
                    op1 = op & 7;
5149
                    if (op >= 0x20) {
5150
                        tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
5151
                        if (op >= 0x30)
5152
                            tcg_gen_helper_0_0(helper_fpop);
5153
                    } else {
5154
                        tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5155
                        tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
5156
                    }
5157
                }
5158
                break;
5159
            case 0x02: /* fcom */
5160
            case 0x22: /* fcom2, undocumented op */
5161
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5162
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5163
                break;
5164
            case 0x03: /* fcomp */
5165
            case 0x23: /* fcomp3, undocumented op */
5166
            case 0x32: /* fcomp5, undocumented op */
5167
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5168
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5169
                tcg_gen_helper_0_0(helper_fpop);
5170
                break;
5171
            case 0x15: /* da/5 */
5172
                switch(rm) {
5173
                case 1: /* fucompp */
5174
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5175
                    tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5176
                    tcg_gen_helper_0_0(helper_fpop);
5177
                    tcg_gen_helper_0_0(helper_fpop);
5178
                    break;
5179
                default:
5180
                    goto illegal_op;
5181
                }
5182
                break;
5183
            case 0x1c:
5184
                switch(rm) {
5185
                case 0: /* feni (287 only, just do nop here) */
5186
                    break;
5187
                case 1: /* fdisi (287 only, just do nop here) */
5188
                    break;
5189
                case 2: /* fclex */
5190
                    tcg_gen_helper_0_0(helper_fclex);
5191
                    break;
5192
                case 3: /* fninit */
5193
                    tcg_gen_helper_0_0(helper_fninit);
5194
                    break;
5195
                case 4: /* fsetpm (287 only, just do nop here) */
5196
                    break;
5197
                default:
5198
                    goto illegal_op;
5199
                }
5200
                break;
5201
            case 0x1d: /* fucomi */
5202
                if (s->cc_op != CC_OP_DYNAMIC)
5203
                    gen_op_set_cc_op(s->cc_op);
5204
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5205
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5206
                gen_op_fcomi_dummy();
5207
                s->cc_op = CC_OP_EFLAGS;
5208
                break;
5209
            case 0x1e: /* fcomi */
5210
                if (s->cc_op != CC_OP_DYNAMIC)
5211
                    gen_op_set_cc_op(s->cc_op);
5212
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5213
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5214
                gen_op_fcomi_dummy();
5215
                s->cc_op = CC_OP_EFLAGS;
5216
                break;
5217
            case 0x28: /* ffree sti */
5218
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5219
                break;
5220
            case 0x2a: /* fst sti */
5221
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5222
                break;
5223
            case 0x2b: /* fstp sti */
5224
            case 0x0b: /* fstp1 sti, undocumented op */
5225
            case 0x3a: /* fstp8 sti, undocumented op */
5226
            case 0x3b: /* fstp9 sti, undocumented op */
5227
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5228
                tcg_gen_helper_0_0(helper_fpop);
5229
                break;
5230
            case 0x2c: /* fucom st(i) */
5231
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5232
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5233
                break;
5234
            case 0x2d: /* fucomp st(i) */
5235
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5236
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5237
                tcg_gen_helper_0_0(helper_fpop);
5238
                break;
5239
            case 0x33: /* de/3 */
5240
                switch(rm) {
5241
                case 1: /* fcompp */
5242
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5243
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5244
                    tcg_gen_helper_0_0(helper_fpop);
5245
                    tcg_gen_helper_0_0(helper_fpop);
5246
                    break;
5247
                default:
5248
                    goto illegal_op;
5249
                }
5250
                break;
5251
            case 0x38: /* ffreep sti, undocumented op */
5252
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5253
                tcg_gen_helper_0_0(helper_fpop);
5254
                break;
5255
            case 0x3c: /* df/4 */
5256
                switch(rm) {
5257
                case 0:
5258
                    tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5259
                    tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5260
                    gen_op_mov_reg_T0(OT_WORD, R_EAX);
5261
                    break;
5262
                default:
5263
                    goto illegal_op;
5264
                }
5265
                break;
5266
            case 0x3d: /* fucomip */
5267
                if (s->cc_op != CC_OP_DYNAMIC)
5268
                    gen_op_set_cc_op(s->cc_op);
5269
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5270
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5271
                tcg_gen_helper_0_0(helper_fpop);
5272
                gen_op_fcomi_dummy();
5273
                s->cc_op = CC_OP_EFLAGS;
5274
                break;
5275
            case 0x3e: /* fcomip */
5276
                if (s->cc_op != CC_OP_DYNAMIC)
5277
                    gen_op_set_cc_op(s->cc_op);
5278
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5279
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5280
                tcg_gen_helper_0_0(helper_fpop);
5281
                gen_op_fcomi_dummy();
5282
                s->cc_op = CC_OP_EFLAGS;
5283
                break;
5284
            case 0x10 ... 0x13: /* fcmovxx */
5285
            case 0x18 ... 0x1b:
5286
                {
5287
                    int op1, l1;
5288
                    const static uint8_t fcmov_cc[8] = {
5289
                        (JCC_B << 1),
5290
                        (JCC_Z << 1),
5291
                        (JCC_BE << 1),
5292
                        (JCC_P << 1),
5293
                    };
5294
                    op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
5295
                    gen_setcc(s, op1);
5296
                    l1 = gen_new_label();
5297
                    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), l1);
5298
                    tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
5299
                    gen_set_label(l1);
5300
                }
5301
                break;
5302
            default:
5303
                goto illegal_op;
5304
            }
5305
        }
5306
        break;
5307
        /************************/
5308
        /* string ops */
5309

    
5310
    case 0xa4: /* movsS */
5311
    case 0xa5:
5312
        if ((b & 1) == 0)
5313
            ot = OT_BYTE;
5314
        else
5315
            ot = dflag + OT_WORD;
5316

    
5317
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5318
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5319
        } else {
5320
            gen_movs(s, ot);
5321
        }
5322
        break;
5323

    
5324
    case 0xaa: /* stosS */
5325
    case 0xab:
5326
        if ((b & 1) == 0)
5327
            ot = OT_BYTE;
5328
        else
5329
            ot = dflag + OT_WORD;
5330

    
5331
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5332
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5333
        } else {
5334
            gen_stos(s, ot);
5335
        }
5336
        break;
5337
    case 0xac: /* lodsS */
5338
    case 0xad:
5339
        if ((b & 1) == 0)
5340
            ot = OT_BYTE;
5341
        else
5342
            ot = dflag + OT_WORD;
5343
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5344
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5345
        } else {
5346
            gen_lods(s, ot);
5347
        }
5348
        break;
5349
    case 0xae: /* scasS */
5350
    case 0xaf:
5351
        if ((b & 1) == 0)
5352
            ot = OT_BYTE;
5353
        else
5354
            ot = dflag + OT_WORD;
5355
        if (prefixes & PREFIX_REPNZ) {
5356
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5357
        } else if (prefixes & PREFIX_REPZ) {
5358
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5359
        } else {
5360
            gen_scas(s, ot);
5361
            s->cc_op = CC_OP_SUBB + ot;
5362
        }
5363
        break;
5364

    
5365
    case 0xa6: /* cmpsS */
5366
    case 0xa7:
5367
        if ((b & 1) == 0)
5368
            ot = OT_BYTE;
5369
        else
5370
            ot = dflag + OT_WORD;
5371
        if (prefixes & PREFIX_REPNZ) {
5372
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5373
        } else if (prefixes & PREFIX_REPZ) {
5374
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5375
        } else {
5376
            gen_cmps(s, ot);
5377
            s->cc_op = CC_OP_SUBB + ot;
5378
        }
5379
        break;
5380
    case 0x6c: /* insS */
5381
    case 0x6d:
5382
        if ((b & 1) == 0)
5383
            ot = OT_BYTE;
5384
        else
5385
            ot = dflag ? OT_LONG : OT_WORD;
5386
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5387
        gen_op_andl_T0_ffff();
5388
        gen_check_io(s, ot, pc_start - s->cs_base, 
5389
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
5390
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5391
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5392
        } else {
5393
            gen_ins(s, ot);
5394
        }
5395
        break;
5396
    case 0x6e: /* outsS */
5397
    case 0x6f:
5398
        if ((b & 1) == 0)
5399
            ot = OT_BYTE;
5400
        else
5401
            ot = dflag ? OT_LONG : OT_WORD;
5402
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5403
        gen_op_andl_T0_ffff();
5404
        gen_check_io(s, ot, pc_start - s->cs_base,
5405
                     svm_is_rep(prefixes) | 4);
5406
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5407
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5408
        } else {
5409
            gen_outs(s, ot);
5410
        }
5411
        break;
5412

    
5413
        /************************/
5414
        /* port I/O */
5415

    
5416
    case 0xe4:
5417
    case 0xe5:
5418
        if ((b & 1) == 0)
5419
            ot = OT_BYTE;
5420
        else
5421
            ot = dflag ? OT_LONG : OT_WORD;
5422
        val = ldub_code(s->pc++);
5423
        gen_op_movl_T0_im(val);
5424
        gen_check_io(s, ot, pc_start - s->cs_base,
5425
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5426
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5427
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5428
        gen_op_mov_reg_T1(ot, R_EAX);
5429
        break;
5430
    case 0xe6:
5431
    case 0xe7:
5432
        if ((b & 1) == 0)
5433
            ot = OT_BYTE;
5434
        else
5435
            ot = dflag ? OT_LONG : OT_WORD;
5436
        val = ldub_code(s->pc++);
5437
        gen_op_movl_T0_im(val);
5438
        gen_check_io(s, ot, pc_start - s->cs_base,
5439
                     svm_is_rep(prefixes));
5440
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5441

    
5442
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5443
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5444
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5445
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5446
        break;
5447
    case 0xec:
5448
    case 0xed:
5449
        if ((b & 1) == 0)
5450
            ot = OT_BYTE;
5451
        else
5452
            ot = dflag ? OT_LONG : OT_WORD;
5453
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5454
        gen_op_andl_T0_ffff();
5455
        gen_check_io(s, ot, pc_start - s->cs_base,
5456
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5457
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5458
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5459
        gen_op_mov_reg_T1(ot, R_EAX);
5460
        break;
5461
    case 0xee:
5462
    case 0xef:
5463
        if ((b & 1) == 0)
5464
            ot = OT_BYTE;
5465
        else
5466
            ot = dflag ? OT_LONG : OT_WORD;
5467
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5468
        gen_op_andl_T0_ffff();
5469
        gen_check_io(s, ot, pc_start - s->cs_base,
5470
                     svm_is_rep(prefixes));
5471
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5472

    
5473
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5474
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5475
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5476
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5477
        break;
5478

    
5479
        /************************/
5480
        /* control */
5481
    case 0xc2: /* ret im */
5482
        val = ldsw_code(s->pc);
5483
        s->pc += 2;
5484
        gen_pop_T0(s);
5485
        if (CODE64(s) && s->dflag)
5486
            s->dflag = 2;
5487
        gen_stack_update(s, val + (2 << s->dflag));
5488
        if (s->dflag == 0)
5489
            gen_op_andl_T0_ffff();
5490
        gen_op_jmp_T0();
5491
        gen_eob(s);
5492
        break;
5493
    case 0xc3: /* ret */
5494
        gen_pop_T0(s);
5495
        gen_pop_update(s);
5496
        if (s->dflag == 0)
5497
            gen_op_andl_T0_ffff();
5498
        gen_op_jmp_T0();
5499
        gen_eob(s);
5500
        break;
5501
    case 0xca: /* lret im */
5502
        val = ldsw_code(s->pc);
5503
        s->pc += 2;
5504
    do_lret:
5505
        if (s->pe && !s->vm86) {
5506
            if (s->cc_op != CC_OP_DYNAMIC)
5507
                gen_op_set_cc_op(s->cc_op);
5508
            gen_jmp_im(pc_start - s->cs_base);
5509
            tcg_gen_helper_0_2(helper_lret_protected,
5510
                               tcg_const_i32(s->dflag), 
5511
                               tcg_const_i32(val));
5512
        } else {
5513
            gen_stack_A0(s);
5514
            /* pop offset */
5515
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5516
            if (s->dflag == 0)
5517
                gen_op_andl_T0_ffff();
5518
            /* NOTE: keeping EIP updated is not a problem in case of
5519
               exception */
5520
            gen_op_jmp_T0();
5521
            /* pop selector */
5522
            gen_op_addl_A0_im(2 << s->dflag);
5523
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5524
            gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5525
            /* add stack offset */
5526
            gen_stack_update(s, val + (4 << s->dflag));
5527
        }
5528
        gen_eob(s);
5529
        break;
5530
    case 0xcb: /* lret */
5531
        val = 0;
5532
        goto do_lret;
5533
    case 0xcf: /* iret */
5534
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5535
            break;
5536
        if (!s->pe) {
5537
            /* real mode */
5538
            tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5539
            s->cc_op = CC_OP_EFLAGS;
5540
        } else if (s->vm86) {
5541
            if (s->iopl != 3) {
5542
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5543
            } else {
5544
                tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5545
                s->cc_op = CC_OP_EFLAGS;
5546
            }
5547
        } else {
5548
            if (s->cc_op != CC_OP_DYNAMIC)
5549
                gen_op_set_cc_op(s->cc_op);
5550
            gen_jmp_im(pc_start - s->cs_base);
5551
            tcg_gen_helper_0_2(helper_iret_protected,
5552
                               tcg_const_i32(s->dflag), 
5553
                               tcg_const_i32(s->pc - s->cs_base));
5554
            s->cc_op = CC_OP_EFLAGS;
5555
        }
5556
        gen_eob(s);
5557
        break;
5558
    case 0xe8: /* call im */
5559
        {
5560
            if (dflag)
5561
                tval = (int32_t)insn_get(s, OT_LONG);
5562
            else
5563
                tval = (int16_t)insn_get(s, OT_WORD);
5564
            next_eip = s->pc - s->cs_base;
5565
            tval += next_eip;
5566
            if (s->dflag == 0)
5567
                tval &= 0xffff;
5568
            gen_movtl_T0_im(next_eip);
5569
            gen_push_T0(s);
5570
            gen_jmp(s, tval);
5571
        }
5572
        break;
5573
    case 0x9a: /* lcall im */
5574
        {
5575
            unsigned int selector, offset;
5576

    
5577
            if (CODE64(s))
5578
                goto illegal_op;
5579
            ot = dflag ? OT_LONG : OT_WORD;
5580
            offset = insn_get(s, ot);
5581
            selector = insn_get(s, OT_WORD);
5582

    
5583
            gen_op_movl_T0_im(selector);
5584
            gen_op_movl_T1_imu(offset);
5585
        }
5586
        goto do_lcall;
5587
    case 0xe9: /* jmp im */
5588
        if (dflag)
5589
            tval = (int32_t)insn_get(s, OT_LONG);
5590
        else
5591
            tval = (int16_t)insn_get(s, OT_WORD);
5592
        tval += s->pc - s->cs_base;
5593
        if (s->dflag == 0)
5594
            tval &= 0xffff;
5595
        gen_jmp(s, tval);
5596
        break;
5597
    case 0xea: /* ljmp im */
5598
        {
5599
            unsigned int selector, offset;
5600

    
5601
            if (CODE64(s))
5602
                goto illegal_op;
5603
            ot = dflag ? OT_LONG : OT_WORD;
5604
            offset = insn_get(s, ot);
5605
            selector = insn_get(s, OT_WORD);
5606

    
5607
            gen_op_movl_T0_im(selector);
5608
            gen_op_movl_T1_imu(offset);
5609
        }
5610
        goto do_ljmp;
5611
    case 0xeb: /* jmp Jb */
5612
        tval = (int8_t)insn_get(s, OT_BYTE);
5613
        tval += s->pc - s->cs_base;
5614
        if (s->dflag == 0)
5615
            tval &= 0xffff;
5616
        gen_jmp(s, tval);
5617
        break;
5618
    case 0x70 ... 0x7f: /* jcc Jb */
5619
        tval = (int8_t)insn_get(s, OT_BYTE);
5620
        goto do_jcc;
5621
    case 0x180 ... 0x18f: /* jcc Jv */
5622
        if (dflag) {
5623
            tval = (int32_t)insn_get(s, OT_LONG);
5624
        } else {
5625
            tval = (int16_t)insn_get(s, OT_WORD);
5626
        }
5627
    do_jcc:
5628
        next_eip = s->pc - s->cs_base;
5629
        tval += next_eip;
5630
        if (s->dflag == 0)
5631
            tval &= 0xffff;
5632
        gen_jcc(s, b, tval, next_eip);
5633
        break;
5634

    
5635
    case 0x190 ... 0x19f: /* setcc Gv */
5636
        modrm = ldub_code(s->pc++);
5637
        gen_setcc(s, b);
5638
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5639
        break;
5640
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
5641
        ot = dflag + OT_WORD;
5642
        modrm = ldub_code(s->pc++);
5643
        reg = ((modrm >> 3) & 7) | rex_r;
5644
        mod = (modrm >> 6) & 3;
5645
        gen_setcc(s, b);
5646
        if (mod != 3) {
5647
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5648
            gen_op_ld_T1_A0(ot + s->mem_index);
5649
        } else {
5650
            rm = (modrm & 7) | REX_B(s);
5651
            gen_op_mov_TN_reg(ot, 1, rm);
5652
        }
5653
        gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5654
        break;
5655

    
5656
        /************************/
5657
        /* flags */
5658
    case 0x9c: /* pushf */
5659
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5660
            break;
5661
        if (s->vm86 && s->iopl != 3) {
5662
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5663
        } else {
5664
            if (s->cc_op != CC_OP_DYNAMIC)
5665
                gen_op_set_cc_op(s->cc_op);
5666
            gen_op_movl_T0_eflags();
5667
            gen_push_T0(s);
5668
        }
5669
        break;
5670
    case 0x9d: /* popf */
5671
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5672
            break;
5673
        if (s->vm86 && s->iopl != 3) {
5674
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5675
        } else {
5676
            gen_pop_T0(s);
5677
            if (s->cpl == 0) {
5678
                if (s->dflag) {
5679
                    gen_op_movl_eflags_T0_cpl0();
5680
                } else {
5681
                    gen_op_movw_eflags_T0_cpl0();
5682
                }
5683
            } else {
5684
                if (s->cpl <= s->iopl) {
5685
                    if (s->dflag) {
5686
                        gen_op_movl_eflags_T0_io();
5687
                    } else {
5688
                        gen_op_movw_eflags_T0_io();
5689
                    }
5690
                } else {
5691
                    if (s->dflag) {
5692
                        gen_op_movl_eflags_T0();
5693
                    } else {
5694
                        gen_op_movw_eflags_T0();
5695
                    }
5696
                }
5697
            }
5698
            gen_pop_update(s);
5699
            s->cc_op = CC_OP_EFLAGS;
5700
            /* abort translation because TF flag may change */
5701
            gen_jmp_im(s->pc - s->cs_base);
5702
            gen_eob(s);
5703
        }
5704
        break;
5705
    case 0x9e: /* sahf */
5706
        if (CODE64(s))
5707
            goto illegal_op;
5708
        gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5709
        if (s->cc_op != CC_OP_DYNAMIC)
5710
            gen_op_set_cc_op(s->cc_op);
5711
        gen_op_movb_eflags_T0();
5712
        s->cc_op = CC_OP_EFLAGS;
5713
        break;
5714
    case 0x9f: /* lahf */
5715
        if (CODE64(s))
5716
            goto illegal_op;
5717
        if (s->cc_op != CC_OP_DYNAMIC)
5718
            gen_op_set_cc_op(s->cc_op);
5719
        gen_op_movl_T0_eflags();
5720
        gen_op_mov_reg_T0(OT_BYTE, R_AH);
5721
        break;
5722
    case 0xf5: /* cmc */
5723
        if (s->cc_op != CC_OP_DYNAMIC)
5724
            gen_op_set_cc_op(s->cc_op);
5725
        gen_op_cmc();
5726
        s->cc_op = CC_OP_EFLAGS;
5727
        break;
5728
    case 0xf8: /* clc */
5729
        if (s->cc_op != CC_OP_DYNAMIC)
5730
            gen_op_set_cc_op(s->cc_op);
5731
        gen_op_clc();
5732
        s->cc_op = CC_OP_EFLAGS;
5733
        break;
5734
    case 0xf9: /* stc */
5735
        if (s->cc_op != CC_OP_DYNAMIC)
5736
            gen_op_set_cc_op(s->cc_op);
5737
        gen_op_stc();
5738
        s->cc_op = CC_OP_EFLAGS;
5739
        break;
5740
    case 0xfc: /* cld */
5741
        tcg_gen_movi_i32(cpu_tmp2_i32, 1);
5742
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5743
        break;
5744
    case 0xfd: /* std */
5745
        tcg_gen_movi_i32(cpu_tmp2_i32, -1);
5746
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5747
        break;
5748

    
5749
        /************************/
5750
        /* bit operations */
5751
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
5752
        ot = dflag + OT_WORD;
5753
        modrm = ldub_code(s->pc++);
5754
        op = (modrm >> 3) & 7;
5755
        mod = (modrm >> 6) & 3;
5756
        rm = (modrm & 7) | REX_B(s);
5757
        if (mod != 3) {
5758
            s->rip_offset = 1;
5759
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5760
            gen_op_ld_T0_A0(ot + s->mem_index);
5761
        } else {
5762
            gen_op_mov_TN_reg(ot, 0, rm);
5763
        }
5764
        /* load shift */
5765
        val = ldub_code(s->pc++);
5766
        gen_op_movl_T1_im(val);
5767
        if (op < 4)
5768
            goto illegal_op;
5769
        op -= 4;
5770
        goto bt_op;
5771
    case 0x1a3: /* bt Gv, Ev */
5772
        op = 0;
5773
        goto do_btx;
5774
    case 0x1ab: /* bts */
5775
        op = 1;
5776
        goto do_btx;
5777
    case 0x1b3: /* btr */
5778
        op = 2;
5779
        goto do_btx;
5780
    case 0x1bb: /* btc */
5781
        op = 3;
5782
    do_btx:
5783
        ot = dflag + OT_WORD;
5784
        modrm = ldub_code(s->pc++);
5785
        reg = ((modrm >> 3) & 7) | rex_r;
5786
        mod = (modrm >> 6) & 3;
5787
        rm = (modrm & 7) | REX_B(s);
5788
        gen_op_mov_TN_reg(OT_LONG, 1, reg);
5789
        if (mod != 3) {
5790
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5791
            /* specific case: we need to add a displacement */
5792
            gen_exts(ot, cpu_T[1]);
5793
            tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
5794
            tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
5795
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
5796
            gen_op_ld_T0_A0(ot + s->mem_index);
5797
        } else {
5798
            gen_op_mov_TN_reg(ot, 0, rm);
5799
        }
5800
    bt_op:
5801
        tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
5802
        switch(op) {
5803
        case 0:
5804
            tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
5805
            tcg_gen_movi_tl(cpu_cc_dst, 0);
5806
            break;
5807
        case 1:
5808
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5809
            tcg_gen_movi_tl(cpu_tmp0, 1);
5810
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5811
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5812
            break;
5813
        case 2:
5814
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5815
            tcg_gen_movi_tl(cpu_tmp0, 1);
5816
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5817
            tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
5818
            tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5819
            break;
5820
        default:
5821
        case 3:
5822
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5823
            tcg_gen_movi_tl(cpu_tmp0, 1);
5824
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5825
            tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5826
            break;
5827
        }
5828
        s->cc_op = CC_OP_SARB + ot;
5829
        if (op != 0) {
5830
            if (mod != 3)
5831
                gen_op_st_T0_A0(ot + s->mem_index);
5832
            else
5833
                gen_op_mov_reg_T0(ot, rm);
5834
            tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
5835
            tcg_gen_movi_tl(cpu_cc_dst, 0);
5836
        }
5837
        break;
5838
    case 0x1bc: /* bsf */
5839
    case 0x1bd: /* bsr */
5840
        ot = dflag + OT_WORD;
5841
        modrm = ldub_code(s->pc++);
5842
        reg = ((modrm >> 3) & 7) | rex_r;
5843
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5844
        /* NOTE: in order to handle the 0 case, we must load the
5845
           result. It could be optimized with a generated jump */
5846
        gen_op_mov_TN_reg(ot, 1, reg);
5847
        gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5848
        gen_op_mov_reg_T1(ot, reg);
5849
        s->cc_op = CC_OP_LOGICB + ot;
5850
        break;
5851
        /************************/
5852
        /* bcd */
5853
    case 0x27: /* daa */
5854
        if (CODE64(s))
5855
            goto illegal_op;
5856
        if (s->cc_op != CC_OP_DYNAMIC)
5857
            gen_op_set_cc_op(s->cc_op);
5858
        gen_op_daa();
5859
        s->cc_op = CC_OP_EFLAGS;
5860
        break;
5861
    case 0x2f: /* das */
5862
        if (CODE64(s))
5863
            goto illegal_op;
5864
        if (s->cc_op != CC_OP_DYNAMIC)
5865
            gen_op_set_cc_op(s->cc_op);
5866
        gen_op_das();
5867
        s->cc_op = CC_OP_EFLAGS;
5868
        break;
5869
    case 0x37: /* aaa */
5870
        if (CODE64(s))
5871
            goto illegal_op;
5872
        if (s->cc_op != CC_OP_DYNAMIC)
5873
            gen_op_set_cc_op(s->cc_op);
5874
        gen_op_aaa();
5875
        s->cc_op = CC_OP_EFLAGS;
5876
        break;
5877
    case 0x3f: /* aas */
5878
        if (CODE64(s))
5879
            goto illegal_op;
5880
        if (s->cc_op != CC_OP_DYNAMIC)
5881
            gen_op_set_cc_op(s->cc_op);
5882
        gen_op_aas();
5883
        s->cc_op = CC_OP_EFLAGS;
5884
        break;
5885
    case 0xd4: /* aam */
5886
        if (CODE64(s))
5887
            goto illegal_op;
5888
        val = ldub_code(s->pc++);
5889
        if (val == 0) {
5890
            gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5891
        } else {
5892
            gen_op_aam(val);
5893
            s->cc_op = CC_OP_LOGICB;
5894
        }
5895
        break;
5896
    case 0xd5: /* aad */
5897
        if (CODE64(s))
5898
            goto illegal_op;
5899
        val = ldub_code(s->pc++);
5900
        gen_op_aad(val);
5901
        s->cc_op = CC_OP_LOGICB;
5902
        break;
5903
        /************************/
5904
        /* misc */
5905
    case 0x90: /* nop */
5906
        /* XXX: xchg + rex handling */
5907
        /* XXX: correct lock test for all insn */
5908
        if (prefixes & PREFIX_LOCK)
5909
            goto illegal_op;
5910
        if (prefixes & PREFIX_REPZ) {
5911
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
5912
        }
5913
        break;
5914
    case 0x9b: /* fwait */
5915
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5916
            (HF_MP_MASK | HF_TS_MASK)) {
5917
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5918
        } else {
5919
            if (s->cc_op != CC_OP_DYNAMIC)
5920
                gen_op_set_cc_op(s->cc_op);
5921
            gen_jmp_im(pc_start - s->cs_base);
5922
            tcg_gen_helper_0_0(helper_fwait);
5923
        }
5924
        break;
5925
    case 0xcc: /* int3 */
5926
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5927
            break;
5928
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5929
        break;
5930
    case 0xcd: /* int N */
5931
        val = ldub_code(s->pc++);
5932
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5933
            break;
5934
        if (s->vm86 && s->iopl != 3) {
5935
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5936
        } else {
5937
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5938
        }
5939
        break;
5940
    case 0xce: /* into */
5941
        if (CODE64(s))
5942
            goto illegal_op;
5943
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5944
            break;
5945
        if (s->cc_op != CC_OP_DYNAMIC)
5946
            gen_op_set_cc_op(s->cc_op);
5947
        gen_jmp_im(pc_start - s->cs_base);
5948
        gen_op_into(s->pc - pc_start);
5949
        break;
5950
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
5951
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
5952
            break;
5953
#if 1
5954
        gen_debug(s, pc_start - s->cs_base);
5955
#else
5956
        /* start debug */
5957
        tb_flush(cpu_single_env);
5958
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5959
#endif
5960
        break;
5961
    case 0xfa: /* cli */
5962
        if (!s->vm86) {
5963
            if (s->cpl <= s->iopl) {
5964
                tcg_gen_helper_0_0(helper_cli);
5965
            } else {
5966
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5967
            }
5968
        } else {
5969
            if (s->iopl == 3) {
5970
                tcg_gen_helper_0_0(helper_cli);
5971
            } else {
5972
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5973
            }
5974
        }
5975
        break;
5976
    case 0xfb: /* sti */
5977
        if (!s->vm86) {
5978
            if (s->cpl <= s->iopl) {
5979
            gen_sti:
5980
                tcg_gen_helper_0_0(helper_sti);
5981
                /* interruptions are enabled only the first insn after sti */
5982
                /* If several instructions disable interrupts, only the
5983
                   _first_ does it */
5984
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5985
                    tcg_gen_helper_0_0(helper_set_inhibit_irq);
5986
                /* give a chance to handle pending irqs */
5987
                gen_jmp_im(s->pc - s->cs_base);
5988
                gen_eob(s);
5989
            } else {
5990
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5991
            }
5992
        } else {
5993
            if (s->iopl == 3) {
5994
                goto gen_sti;
5995
            } else {
5996
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5997
            }
5998
        }
5999
        break;
6000
    case 0x62: /* bound */
6001
        if (CODE64(s))
6002
            goto illegal_op;
6003
        ot = dflag ? OT_LONG : OT_WORD;
6004
        modrm = ldub_code(s->pc++);
6005
        reg = (modrm >> 3) & 7;
6006
        mod = (modrm >> 6) & 3;
6007
        if (mod == 3)
6008
            goto illegal_op;
6009
        gen_op_mov_TN_reg(ot, 0, reg);
6010
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6011
        gen_jmp_im(pc_start - s->cs_base);
6012
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6013
        if (ot == OT_WORD)
6014
            tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
6015
        else
6016
            tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
6017
        break;
6018
    case 0x1c8 ... 0x1cf: /* bswap reg */
6019
        reg = (b & 7) | REX_B(s);
6020
#ifdef TARGET_X86_64
6021
        if (dflag == 2) {
6022
            gen_op_mov_TN_reg(OT_QUAD, 0, reg);
6023
            tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
6024
            gen_op_mov_reg_T0(OT_QUAD, reg);
6025
        } else
6026
        {
6027
            TCGv tmp0;
6028
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
6029
            
6030
            tmp0 = tcg_temp_new(TCG_TYPE_I32);
6031
            tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
6032
            tcg_gen_bswap_i32(tmp0, tmp0);
6033
            tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
6034
            gen_op_mov_reg_T0(OT_LONG, reg);
6035
        }
6036
#else
6037
        {
6038
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
6039
            tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
6040
            gen_op_mov_reg_T0(OT_LONG, reg);
6041
        }
6042
#endif
6043
        break;
6044
    case 0xd6: /* salc */
6045
        if (CODE64(s))
6046
            goto illegal_op;
6047
        if (s->cc_op != CC_OP_DYNAMIC)
6048
            gen_op_set_cc_op(s->cc_op);
6049
        gen_op_salc();
6050
        break;
6051
    case 0xe0: /* loopnz */
6052
    case 0xe1: /* loopz */
6053
        if (s->cc_op != CC_OP_DYNAMIC)
6054
            gen_op_set_cc_op(s->cc_op);
6055
        /* FALL THRU */
6056
    case 0xe2: /* loop */
6057
    case 0xe3: /* jecxz */
6058
        {
6059
            int l1, l2;
6060

    
6061
            tval = (int8_t)insn_get(s, OT_BYTE);
6062
            next_eip = s->pc - s->cs_base;
6063
            tval += next_eip;
6064
            if (s->dflag == 0)
6065
                tval &= 0xffff;
6066

    
6067
            l1 = gen_new_label();
6068
            l2 = gen_new_label();
6069
            b &= 3;
6070
            if (b == 3) {
6071
                gen_op_jz_ecx[s->aflag](l1);
6072
            } else {
6073
                gen_op_dec_ECX[s->aflag]();
6074
                if (b <= 1)
6075
                    gen_op_mov_T0_cc();
6076
                gen_op_loop[s->aflag][b](l1);
6077
            }
6078

    
6079
            gen_jmp_im(next_eip);
6080
            gen_op_jmp_label(l2);
6081
            gen_set_label(l1);
6082
            gen_jmp_im(tval);
6083
            gen_set_label(l2);
6084
            gen_eob(s);
6085
        }
6086
        break;
6087
    case 0x130: /* wrmsr */
6088
    case 0x132: /* rdmsr */
6089
        if (s->cpl != 0) {
6090
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6091
        } else {
6092
            int retval = 0;
6093
            if (b & 2) {
6094
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
6095
                tcg_gen_helper_0_0(helper_rdmsr);
6096
            } else {
6097
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
6098
                tcg_gen_helper_0_0(helper_wrmsr);
6099
            }
6100
            if(retval)
6101
                gen_eob(s);
6102
        }
6103
        break;
6104
    case 0x131: /* rdtsc */
6105
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
6106
            break;
6107
        gen_jmp_im(pc_start - s->cs_base);
6108
        tcg_gen_helper_0_0(helper_rdtsc);
6109
        break;
6110
    case 0x133: /* rdpmc */
6111
        gen_jmp_im(pc_start - s->cs_base);
6112
        tcg_gen_helper_0_0(helper_rdpmc);
6113
        break;
6114
    case 0x134: /* sysenter */
6115
        if (CODE64(s))
6116
            goto illegal_op;
6117
        if (!s->pe) {
6118
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6119
        } else {
6120
            if (s->cc_op != CC_OP_DYNAMIC) {
6121
                gen_op_set_cc_op(s->cc_op);
6122
                s->cc_op = CC_OP_DYNAMIC;
6123
            }
6124
            gen_jmp_im(pc_start - s->cs_base);
6125
            tcg_gen_helper_0_0(helper_sysenter);
6126
            gen_eob(s);
6127
        }
6128
        break;
6129
    case 0x135: /* sysexit */
6130
        if (CODE64(s))
6131
            goto illegal_op;
6132
        if (!s->pe) {
6133
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6134
        } else {
6135
            if (s->cc_op != CC_OP_DYNAMIC) {
6136
                gen_op_set_cc_op(s->cc_op);
6137
                s->cc_op = CC_OP_DYNAMIC;
6138
            }
6139
            gen_jmp_im(pc_start - s->cs_base);
6140
            tcg_gen_helper_0_0(helper_sysexit);
6141
            gen_eob(s);
6142
        }
6143
        break;
6144
#ifdef TARGET_X86_64
6145
    case 0x105: /* syscall */
6146
        /* XXX: is it usable in real mode ? */
6147
        if (s->cc_op != CC_OP_DYNAMIC) {
6148
            gen_op_set_cc_op(s->cc_op);
6149
            s->cc_op = CC_OP_DYNAMIC;
6150
        }
6151
        gen_jmp_im(pc_start - s->cs_base);
6152
        tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
6153
        gen_eob(s);
6154
        break;
6155
    case 0x107: /* sysret */
6156
        if (!s->pe) {
6157
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6158
        } else {
6159
            if (s->cc_op != CC_OP_DYNAMIC) {
6160
                gen_op_set_cc_op(s->cc_op);
6161
                s->cc_op = CC_OP_DYNAMIC;
6162
            }
6163
            gen_jmp_im(pc_start - s->cs_base);
6164
            tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
6165
            /* condition codes are modified only in long mode */
6166
            if (s->lma)
6167
                s->cc_op = CC_OP_EFLAGS;
6168
            gen_eob(s);
6169
        }
6170
        break;
6171
#endif
6172
    case 0x1a2: /* cpuid */
6173
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
6174
            break;
6175
        tcg_gen_helper_0_0(helper_cpuid);
6176
        break;
6177
    case 0xf4: /* hlt */
6178
        if (s->cpl != 0) {
6179
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6180
        } else {
6181
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
6182
                break;
6183
            if (s->cc_op != CC_OP_DYNAMIC)
6184
                gen_op_set_cc_op(s->cc_op);
6185
            gen_jmp_im(s->pc - s->cs_base);
6186
            tcg_gen_helper_0_0(helper_hlt);
6187
            s->is_jmp = 3;
6188
        }
6189
        break;
6190
    case 0x100:
6191
        modrm = ldub_code(s->pc++);
6192
        mod = (modrm >> 6) & 3;
6193
        op = (modrm >> 3) & 7;
6194
        switch(op) {
6195
        case 0: /* sldt */
6196
            if (!s->pe || s->vm86)
6197
                goto illegal_op;
6198
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
6199
                break;
6200
            gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
6201
            ot = OT_WORD;
6202
            if (mod == 3)
6203
                ot += s->dflag;
6204
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6205
            break;
6206
        case 2: /* lldt */
6207
            if (!s->pe || s->vm86)
6208
                goto illegal_op;
6209
            if (s->cpl != 0) {
6210
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6211
            } else {
6212
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
6213
                    break;
6214
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6215
                gen_jmp_im(pc_start - s->cs_base);
6216
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6217
                tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
6218
            }
6219
            break;
6220
        case 1: /* str */
6221
            if (!s->pe || s->vm86)
6222
                goto illegal_op;
6223
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
6224
                break;
6225
            gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
6226
            ot = OT_WORD;
6227
            if (mod == 3)
6228
                ot += s->dflag;
6229
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6230
            break;
6231
        case 3: /* ltr */
6232
            if (!s->pe || s->vm86)
6233
                goto illegal_op;
6234
            if (s->cpl != 0) {
6235
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6236
            } else {
6237
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
6238
                    break;
6239
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6240
                gen_jmp_im(pc_start - s->cs_base);
6241
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6242
                tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
6243
            }
6244
            break;
6245
        case 4: /* verr */
6246
        case 5: /* verw */
6247
            if (!s->pe || s->vm86)
6248
                goto illegal_op;
6249
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6250
            if (s->cc_op != CC_OP_DYNAMIC)
6251
                gen_op_set_cc_op(s->cc_op);
6252
            if (op == 4)
6253
                gen_op_verr();
6254
            else
6255
                gen_op_verw();
6256
            s->cc_op = CC_OP_EFLAGS;
6257
            break;
6258
        default:
6259
            goto illegal_op;
6260
        }
6261
        break;
6262
    case 0x101:
6263
        modrm = ldub_code(s->pc++);
6264
        mod = (modrm >> 6) & 3;
6265
        op = (modrm >> 3) & 7;
6266
        rm = modrm & 7;
6267
        switch(op) {
6268
        case 0: /* sgdt */
6269
            if (mod == 3)
6270
                goto illegal_op;
6271
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
6272
                break;
6273
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6274
            gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
6275
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
6276
            gen_add_A0_im(s, 2);
6277
            gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
6278
            if (!s->dflag)
6279
                gen_op_andl_T0_im(0xffffff);
6280
            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6281
            break;
6282
        case 1:
6283
            if (mod == 3) {
6284
                switch (rm) {
6285
                case 0: /* monitor */
6286
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6287
                        s->cpl != 0)
6288
                        goto illegal_op;
6289
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
6290
                        break;
6291
                    gen_jmp_im(pc_start - s->cs_base);
6292
#ifdef TARGET_X86_64
6293
                    if (s->aflag == 2) {
6294
                        gen_op_movq_A0_reg(R_EBX);
6295
                        gen_op_addq_A0_AL();
6296
                    } else
6297
#endif
6298
                    {
6299
                        gen_op_movl_A0_reg(R_EBX);
6300
                        gen_op_addl_A0_AL();
6301
                        if (s->aflag == 0)
6302
                            gen_op_andl_A0_ffff();
6303
                    }
6304
                    gen_add_A0_ds_seg(s);
6305
                    tcg_gen_helper_0_1(helper_monitor, cpu_A0);
6306
                    break;
6307
                case 1: /* mwait */
6308
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6309
                        s->cpl != 0)
6310
                        goto illegal_op;
6311
                    if (s->cc_op != CC_OP_DYNAMIC) {
6312
                        gen_op_set_cc_op(s->cc_op);
6313
                        s->cc_op = CC_OP_DYNAMIC;
6314
                    }
6315
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
6316
                        break;
6317
                    gen_jmp_im(s->pc - s->cs_base);
6318
                    tcg_gen_helper_0_0(helper_mwait);
6319
                    gen_eob(s);
6320
                    break;
6321
                default:
6322
                    goto illegal_op;
6323
                }
6324
            } else { /* sidt */
6325
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
6326
                    break;
6327
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6328
                gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
6329
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
6330
                gen_add_A0_im(s, 2);
6331
                gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
6332
                if (!s->dflag)
6333
                    gen_op_andl_T0_im(0xffffff);
6334
                gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6335
            }
6336
            break;
6337
        case 2: /* lgdt */
6338
        case 3: /* lidt */
6339
            if (mod == 3) {
6340
                switch(rm) {
6341
                case 0: /* VMRUN */
6342
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
6343
                        break;
6344
                    if (s->cc_op != CC_OP_DYNAMIC)
6345
                        gen_op_set_cc_op(s->cc_op);
6346
                    gen_jmp_im(s->pc - s->cs_base);
6347
                    tcg_gen_helper_0_0(helper_vmrun);
6348
                    s->cc_op = CC_OP_EFLAGS;
6349
                    gen_eob(s);
6350
                    break;
6351
                case 1: /* VMMCALL */
6352
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
6353
                         break;
6354
                    /* FIXME: cause #UD if hflags & SVM */
6355
                    tcg_gen_helper_0_0(helper_vmmcall);
6356
                    break;
6357
                case 2: /* VMLOAD */
6358
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
6359
                         break;
6360
                    tcg_gen_helper_0_0(helper_vmload);
6361
                    break;
6362
                case 3: /* VMSAVE */
6363
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
6364
                         break;
6365
                    tcg_gen_helper_0_0(helper_vmsave);
6366
                    break;
6367
                case 4: /* STGI */
6368
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
6369
                         break;
6370
                    tcg_gen_helper_0_0(helper_stgi);
6371
                    break;
6372
                case 5: /* CLGI */
6373
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
6374
                         break;
6375
                    tcg_gen_helper_0_0(helper_clgi);
6376
                    break;
6377
                case 6: /* SKINIT */
6378
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
6379
                         break;
6380
                    tcg_gen_helper_0_0(helper_skinit);
6381
                    break;
6382
                case 7: /* INVLPGA */
6383
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
6384
                         break;
6385
                    tcg_gen_helper_0_0(helper_invlpga);
6386
                    break;
6387
                default:
6388
                    goto illegal_op;
6389
                }
6390
            } else if (s->cpl != 0) {
6391
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6392
            } else {
6393
                if (gen_svm_check_intercept(s, pc_start,
6394
                                            op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
6395
                    break;
6396
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6397
                gen_op_ld_T1_A0(OT_WORD + s->mem_index);
6398
                gen_add_A0_im(s, 2);
6399
                gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6400
                if (!s->dflag)
6401
                    gen_op_andl_T0_im(0xffffff);
6402
                if (op == 2) {
6403
                    gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
6404
                    gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
6405
                } else {
6406
                    gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
6407
                    gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
6408
                }
6409
            }
6410
            break;
6411
        case 4: /* smsw */
6412
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
6413
                break;
6414
            gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
6415
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6416
            break;
6417
        case 6: /* lmsw */
6418
            if (s->cpl != 0) {
6419
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6420
            } else {
6421
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
6422
                    break;
6423
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6424
                tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
6425
                gen_jmp_im(s->pc - s->cs_base);
6426
                gen_eob(s);
6427
            }
6428
            break;
6429
        case 7: /* invlpg */
6430
            if (s->cpl != 0) {
6431
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6432
            } else {
6433
                if (mod == 3) {
6434
#ifdef TARGET_X86_64
6435
                    if (CODE64(s) && rm == 0) {
6436
                        /* swapgs */
6437
                        gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
6438
                        gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
6439
                        gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
6440
                        gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
6441
                    } else
6442
#endif
6443
                    {
6444
                        goto illegal_op;
6445
                    }
6446
                } else {
6447
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
6448
                        break;
6449
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6450
                    tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
6451
                    gen_jmp_im(s->pc - s->cs_base);
6452
                    gen_eob(s);
6453
                }
6454
            }
6455
            break;
6456
        default:
6457
            goto illegal_op;
6458
        }
6459
        break;
6460
    case 0x108: /* invd */
6461
    case 0x109: /* wbinvd */
6462
        if (s->cpl != 0) {
6463
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6464
        } else {
6465
            if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6466
                break;
6467
            /* nothing to do */
6468
        }
6469
        break;
6470
    case 0x63: /* arpl or movslS (x86_64) */
6471
#ifdef TARGET_X86_64
6472
        if (CODE64(s)) {
6473
            int d_ot;
6474
            /* d_ot is the size of destination */
6475
            d_ot = dflag + OT_WORD;
6476

    
6477
            modrm = ldub_code(s->pc++);
6478
            reg = ((modrm >> 3) & 7) | rex_r;
6479
            mod = (modrm >> 6) & 3;
6480
            rm = (modrm & 7) | REX_B(s);
6481

    
6482
            if (mod == 3) {
6483
                gen_op_mov_TN_reg(OT_LONG, 0, rm);
6484
                /* sign extend */
6485
                if (d_ot == OT_QUAD)
6486
                    gen_op_movslq_T0_T0();
6487
                gen_op_mov_reg_T0(d_ot, reg);
6488
            } else {
6489
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6490
                if (d_ot == OT_QUAD) {
6491
                    gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6492
                } else {
6493
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6494
                }
6495
                gen_op_mov_reg_T0(d_ot, reg);
6496
            }
6497
        } else
6498
#endif
6499
        {
6500
            if (!s->pe || s->vm86)
6501
                goto illegal_op;
6502
            ot = dflag ? OT_LONG : OT_WORD;
6503
            modrm = ldub_code(s->pc++);
6504
            reg = (modrm >> 3) & 7;
6505
            mod = (modrm >> 6) & 3;
6506
            rm = modrm & 7;
6507
            if (mod != 3) {
6508
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6509
                gen_op_ld_T0_A0(ot + s->mem_index);
6510
            } else {
6511
                gen_op_mov_TN_reg(ot, 0, rm);
6512
            }
6513
            gen_op_mov_TN_reg(ot, 1, reg);
6514
            if (s->cc_op != CC_OP_DYNAMIC)
6515
                gen_op_set_cc_op(s->cc_op);
6516
            gen_op_arpl();
6517
            s->cc_op = CC_OP_EFLAGS;
6518
            if (mod != 3) {
6519
                gen_op_st_T0_A0(ot + s->mem_index);
6520
            } else {
6521
                gen_op_mov_reg_T0(ot, rm);
6522
            }
6523
            gen_op_arpl_update();
6524
        }
6525
        break;
6526
    case 0x102: /* lar */
6527
    case 0x103: /* lsl */
6528
        if (!s->pe || s->vm86)
6529
            goto illegal_op;
6530
        ot = dflag ? OT_LONG : OT_WORD;
6531
        modrm = ldub_code(s->pc++);
6532
        reg = ((modrm >> 3) & 7) | rex_r;
6533
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6534
        gen_op_mov_TN_reg(ot, 1, reg);
6535
        if (s->cc_op != CC_OP_DYNAMIC)
6536
            gen_op_set_cc_op(s->cc_op);
6537
        if (b == 0x102)
6538
            gen_op_lar();
6539
        else
6540
            gen_op_lsl();
6541
        s->cc_op = CC_OP_EFLAGS;
6542
        gen_op_mov_reg_T1(ot, reg);
6543
        break;
6544
    case 0x118:
6545
        modrm = ldub_code(s->pc++);
6546
        mod = (modrm >> 6) & 3;
6547
        op = (modrm >> 3) & 7;
6548
        switch(op) {
6549
        case 0: /* prefetchnta */
6550
        case 1: /* prefetchnt0 */
6551
        case 2: /* prefetchnt0 */
6552
        case 3: /* prefetchnt0 */
6553
            if (mod == 3)
6554
                goto illegal_op;
6555
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6556
            /* nothing more to do */
6557
            break;
6558
        default: /* nop (multi byte) */
6559
            gen_nop_modrm(s, modrm);
6560
            break;
6561
        }
6562
        break;
6563
    case 0x119 ... 0x11f: /* nop (multi byte) */
6564
        modrm = ldub_code(s->pc++);
6565
        gen_nop_modrm(s, modrm);
6566
        break;
6567
    case 0x120: /* mov reg, crN */
6568
    case 0x122: /* mov crN, reg */
6569
        if (s->cpl != 0) {
6570
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6571
        } else {
6572
            modrm = ldub_code(s->pc++);
6573
            if ((modrm & 0xc0) != 0xc0)
6574
                goto illegal_op;
6575
            rm = (modrm & 7) | REX_B(s);
6576
            reg = ((modrm >> 3) & 7) | rex_r;
6577
            if (CODE64(s))
6578
                ot = OT_QUAD;
6579
            else
6580
                ot = OT_LONG;
6581
            switch(reg) {
6582
            case 0:
6583
            case 2:
6584
            case 3:
6585
            case 4:
6586
            case 8:
6587
                if (b & 2) {
6588
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6589
                    gen_op_mov_TN_reg(ot, 0, rm);
6590
                    tcg_gen_helper_0_2(helper_movl_crN_T0, 
6591
                                       tcg_const_i32(reg), cpu_T[0]);
6592
                    gen_jmp_im(s->pc - s->cs_base);
6593
                    gen_eob(s);
6594
                } else {
6595
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6596
#if !defined(CONFIG_USER_ONLY)
6597
                    if (reg == 8)
6598
                        tcg_gen_helper_1_0(helper_movtl_T0_cr8, cpu_T[0]);
6599
                    else
6600
#endif
6601
                        gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6602
                    gen_op_mov_reg_T0(ot, rm);
6603
                }
6604
                break;
6605
            default:
6606
                goto illegal_op;
6607
            }
6608
        }
6609
        break;
6610
    case 0x121: /* mov reg, drN */
6611
    case 0x123: /* mov drN, reg */
6612
        if (s->cpl != 0) {
6613
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6614
        } else {
6615
            modrm = ldub_code(s->pc++);
6616
            if ((modrm & 0xc0) != 0xc0)
6617
                goto illegal_op;
6618
            rm = (modrm & 7) | REX_B(s);
6619
            reg = ((modrm >> 3) & 7) | rex_r;
6620
            if (CODE64(s))
6621
                ot = OT_QUAD;
6622
            else
6623
                ot = OT_LONG;
6624
            /* XXX: do it dynamically with CR4.DE bit */
6625
            if (reg == 4 || reg == 5 || reg >= 8)
6626
                goto illegal_op;
6627
            if (b & 2) {
6628
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6629
                gen_op_mov_TN_reg(ot, 0, rm);
6630
                tcg_gen_helper_0_2(helper_movl_drN_T0,
6631
                                   tcg_const_i32(reg), cpu_T[0]);
6632
                gen_jmp_im(s->pc - s->cs_base);
6633
                gen_eob(s);
6634
            } else {
6635
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6636
                gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6637
                gen_op_mov_reg_T0(ot, rm);
6638
            }
6639
        }
6640
        break;
6641
    case 0x106: /* clts */
6642
        if (s->cpl != 0) {
6643
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6644
        } else {
6645
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6646
            tcg_gen_helper_0_0(helper_clts);
6647
            /* abort block because static cpu state changed */
6648
            gen_jmp_im(s->pc - s->cs_base);
6649
            gen_eob(s);
6650
        }
6651
        break;
6652
    /* MMX/3DNow!/SSE/SSE2/SSE3 support */
6653
    case 0x1c3: /* MOVNTI reg, mem */
6654
        if (!(s->cpuid_features & CPUID_SSE2))
6655
            goto illegal_op;
6656
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6657
        modrm = ldub_code(s->pc++);
6658
        mod = (modrm >> 6) & 3;
6659
        if (mod == 3)
6660
            goto illegal_op;
6661
        reg = ((modrm >> 3) & 7) | rex_r;
6662
        /* generate a generic store */
6663
        gen_ldst_modrm(s, modrm, ot, reg, 1);
6664
        break;
6665
    case 0x1ae:
6666
        modrm = ldub_code(s->pc++);
6667
        mod = (modrm >> 6) & 3;
6668
        op = (modrm >> 3) & 7;
6669
        switch(op) {
6670
        case 0: /* fxsave */
6671
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6672
                (s->flags & HF_EM_MASK))
6673
                goto illegal_op;
6674
            if (s->flags & HF_TS_MASK) {
6675
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6676
                break;
6677
            }
6678
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6679
            if (s->cc_op != CC_OP_DYNAMIC)
6680
                gen_op_set_cc_op(s->cc_op);
6681
            gen_jmp_im(pc_start - s->cs_base);
6682
            tcg_gen_helper_0_2(helper_fxsave, 
6683
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
6684
            break;
6685
        case 1: /* fxrstor */
6686
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6687
                (s->flags & HF_EM_MASK))
6688
                goto illegal_op;
6689
            if (s->flags & HF_TS_MASK) {
6690
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6691
                break;
6692
            }
6693
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6694
            if (s->cc_op != CC_OP_DYNAMIC)
6695
                gen_op_set_cc_op(s->cc_op);
6696
            gen_jmp_im(pc_start - s->cs_base);
6697
            tcg_gen_helper_0_2(helper_fxrstor,
6698
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
6699
            break;
6700
        case 2: /* ldmxcsr */
6701
        case 3: /* stmxcsr */
6702
            if (s->flags & HF_TS_MASK) {
6703
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6704
                break;
6705
            }
6706
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6707
                mod == 3)
6708
                goto illegal_op;
6709
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6710
            if (op == 2) {
6711
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6712
                gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6713
            } else {
6714
                gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6715
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
6716
            }
6717
            break;
6718
        case 5: /* lfence */
6719
        case 6: /* mfence */
6720
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6721
                goto illegal_op;
6722
            break;
6723
        case 7: /* sfence / clflush */
6724
            if ((modrm & 0xc7) == 0xc0) {
6725
                /* sfence */
6726
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
6727
                if (!(s->cpuid_features & CPUID_SSE))
6728
                    goto illegal_op;
6729
            } else {
6730
                /* clflush */
6731
                if (!(s->cpuid_features & CPUID_CLFLUSH))
6732
                    goto illegal_op;
6733
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6734
            }
6735
            break;
6736
        default:
6737
            goto illegal_op;
6738
        }
6739
        break;
6740
    case 0x10d: /* 3DNow! prefetch(w) */
6741
        modrm = ldub_code(s->pc++);
6742
        mod = (modrm >> 6) & 3;
6743
        if (mod == 3)
6744
            goto illegal_op;
6745
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6746
        /* ignore for now */
6747
        break;
6748
    case 0x1aa: /* rsm */
6749
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
6750
            break;
6751
        if (!(s->flags & HF_SMM_MASK))
6752
            goto illegal_op;
6753
        if (s->cc_op != CC_OP_DYNAMIC) {
6754
            gen_op_set_cc_op(s->cc_op);
6755
            s->cc_op = CC_OP_DYNAMIC;
6756
        }
6757
        gen_jmp_im(s->pc - s->cs_base);
6758
        tcg_gen_helper_0_0(helper_rsm);
6759
        gen_eob(s);
6760
        break;
6761
    case 0x10e ... 0x10f:
6762
        /* 3DNow! instructions, ignore prefixes */
6763
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
6764
    case 0x110 ... 0x117:
6765
    case 0x128 ... 0x12f:
6766
    case 0x150 ... 0x177:
6767
    case 0x17c ... 0x17f:
6768
    case 0x1c2:
6769
    case 0x1c4 ... 0x1c6:
6770
    case 0x1d0 ... 0x1fe:
6771
        gen_sse(s, b, pc_start, rex_r);
6772
        break;
6773
    default:
6774
        goto illegal_op;
6775
    }
6776
    /* lock generation */
6777
    if (s->prefix & PREFIX_LOCK)
6778
        tcg_gen_helper_0_0(helper_unlock);
6779
    return s->pc;
6780
 illegal_op:
6781
    if (s->prefix & PREFIX_LOCK)
6782
        tcg_gen_helper_0_0(helper_unlock);
6783
    /* XXX: ensure that no lock was generated */
6784
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6785
    return s->pc;
6786
}
6787

    
6788
/* Expansion callback registered with the TCG context (see
   optimize_flags_init): translates a macro opcode id into the helper
   calls it stands for.  With MACRO_TEST undefined the dispatch body is
   empty and the function is a no-op. */
static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
{
#ifdef MACRO_TEST
    if (macro_id == MACRO_TEST) {
        /* test expansion: 32-bit divide of EAX by T0 */
        tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
    }
#endif
}
6798

    
6799
/* One-time TCG frontend initialization for the x86 translator: sanity
   checks the condition-code table layout, installs the macro expansion
   callback, and registers the global TCG values (env pointer, T0/T1/A0
   temporaries, and the lazy condition-code state cc_op/cc_src/cc_dst)
   that the code generator maps either to fixed host registers or to
   fields of CPUState reached through TCG_AREG0. */
void optimize_flags_init(void)
{
    /* The cc_op dispatch code indexes CCTable with shifts, so its size
       must be exactly a power of two matching the host register width
       (8 bytes on 32-bit hosts, 16 on 64-bit).  NOTE(review): inferred
       from the two asserted sizes; the indexing code is not visible in
       this chunk -- confirm against the CCTable users. */
#if TCG_TARGET_REG_BITS == 32
    assert(sizeof(CCTable) == (1 << 3));
#else
    assert(sizeof(CCTable) == (1 << 4));
#endif
    /* hook up macro opcode expansion (see tcg_macro_func) */
    tcg_set_macro_func(&tcg_ctx, tcg_macro_func);

    /* CPU state pointer lives permanently in the fixed host register
       TCG_AREG0 */
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
#if TARGET_LONG_BITS > HOST_LONG_BITS
    /* target words do not fit in a host register: keep T0/T1/A0 in
       CPUState memory slots instead of fixed registers */
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL, 
                                  TCG_AREG0, offsetof(CPUState, t0), "T0");
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
                                  TCG_AREG0, offsetof(CPUState, t1), "T1");
    cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
                                TCG_AREG0, offsetof(CPUState, t2), "A0");
#else
    /* host registers are wide enough: pin the three main temporaries to
       fixed host registers AREG1..AREG3 */
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
    cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
#endif
    /* T3 always lives in CPUState memory */
    cpu_T3 = tcg_global_mem_new(TCG_TYPE_TL,
                                TCG_AREG0, offsetof(CPUState, t3), "T3");
#if defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS)
    /* XXX: must be suppressed once there are less fixed registers */
    /* on i386 hosts a 64-bit temporary is synthesized from the AREG1/
       AREG2 register pair (AREG1/AREG2 are free here since T0/T1 use
       them only in the branch above, which is mutually exclusive with
       this one) */
    cpu_tmp1_i64 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");
#endif
    /* lazy EFLAGS evaluation state: operation code plus its two source/
       destination operands, all stored in CPUState */
    cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
                                   TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
    cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
    cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
}
6834

    
6835
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
6836
   basic block 'tb'. If search_pc is TRUE, also generate PC
6837
   information for each intermediate instruction. */
6838
static inline int gen_intermediate_code_internal(CPUState *env,
6839
                                                 TranslationBlock *tb,
6840
                                                 int search_pc)
6841
{
6842
    DisasContext dc1, *dc = &dc1;
6843
    target_ulong pc_ptr;
6844
    uint16_t *gen_opc_end;
6845
    int j, lj, cflags;
6846
    uint64_t flags;
6847
    target_ulong pc_start;
6848
    target_ulong cs_base;
6849

    
6850
    /* generate intermediate code */
6851
    pc_start = tb->pc;
6852
    cs_base = tb->cs_base;
6853
    flags = tb->flags;
6854
    cflags = tb->cflags;
6855

    
6856
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
6857
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
6858
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
6859
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
6860
    dc->f_st = 0;
6861
    dc->vm86 = (flags >> VM_SHIFT) & 1;
6862
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
6863
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
6864
    dc->tf = (flags >> TF_SHIFT) & 1;
6865
    dc->singlestep_enabled = env->singlestep_enabled;
6866
    dc->cc_op = CC_OP_DYNAMIC;
6867
    dc->cs_base = cs_base;
6868
    dc->tb = tb;
6869
    dc->popl_esp_hack = 0;
6870
    /* select memory access functions */
6871
    dc->mem_index = 0;
6872
    if (flags & HF_SOFTMMU_MASK) {
6873
        if (dc->cpl == 3)
6874
            dc->mem_index = 2 * 4;
6875
        else
6876
            dc->mem_index = 1 * 4;
6877
    }
6878
    dc->cpuid_features = env->cpuid_features;
6879
    dc->cpuid_ext_features = env->cpuid_ext_features;
6880
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
6881
#ifdef TARGET_X86_64
6882
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
6883
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
6884
#endif
6885
    dc->flags = flags;
6886
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
6887
                    (flags & HF_INHIBIT_IRQ_MASK)
6888
#ifndef CONFIG_SOFTMMU
6889
                    || (flags & HF_SOFTMMU_MASK)
6890
#endif
6891
                    );
6892
#if 0
6893
    /* check addseg logic */
6894
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
6895
        printf("ERROR addseg\n");
6896
#endif
6897

    
6898
    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
6899
#if !(defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS))
6900
    cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
6901
#endif
6902
    cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
6903
    cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
6904
    cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
6905
    cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
6906
    cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
6907
    cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
6908
    cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
6909

    
6910
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
6911

    
6912
    dc->is_jmp = DISAS_NEXT;
6913
    pc_ptr = pc_start;
6914
    lj = -1;
6915

    
6916
    for(;;) {
6917
        if (env->nb_breakpoints > 0) {
6918
            for(j = 0; j < env->nb_breakpoints; j++) {
6919
                if (env->breakpoints[j] == pc_ptr) {
6920
                    gen_debug(dc, pc_ptr - dc->cs_base);
6921
                    break;
6922
                }
6923
            }
6924
        }
6925
        if (search_pc) {
6926
            j = gen_opc_ptr - gen_opc_buf;
6927
            if (lj < j) {
6928
                lj++;
6929
                while (lj < j)
6930
                    gen_opc_instr_start[lj++] = 0;
6931
            }
6932
            gen_opc_pc[lj] = pc_ptr;
6933
            gen_opc_cc_op[lj] = dc->cc_op;
6934
            gen_opc_instr_start[lj] = 1;
6935
        }
6936
        pc_ptr = disas_insn(dc, pc_ptr);
6937
        /* stop translation if indicated */
6938
        if (dc->is_jmp)
6939
            break;
6940
        /* if single step mode, we generate only one instruction and
6941
           generate an exception */
6942
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
6943
           the flag and abort the translation to give the irqs a
6944
           change to be happen */
6945
        if (dc->tf || dc->singlestep_enabled ||
6946
            (flags & HF_INHIBIT_IRQ_MASK) ||
6947
            (cflags & CF_SINGLE_INSN)) {
6948
            gen_jmp_im(pc_ptr - dc->cs_base);
6949
            gen_eob(dc);
6950
            break;
6951
        }
6952
        /* if too long translation, stop generation too */
6953
        if (gen_opc_ptr >= gen_opc_end ||
6954
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
6955
            gen_jmp_im(pc_ptr - dc->cs_base);
6956
            gen_eob(dc);
6957
            break;
6958
        }
6959
    }
6960
    *gen_opc_ptr = INDEX_op_end;
6961
    /* we don't forget to fill the last values */
6962
    if (search_pc) {
6963
        j = gen_opc_ptr - gen_opc_buf;
6964
        lj++;
6965
        while (lj <= j)
6966
            gen_opc_instr_start[lj++] = 0;
6967
    }
6968

    
6969
#ifdef DEBUG_DISAS
6970
    if (loglevel & CPU_LOG_TB_CPU) {
6971
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
6972
    }
6973
    if (loglevel & CPU_LOG_TB_IN_ASM) {
6974
        int disas_flags;
6975
        fprintf(logfile, "----------------\n");
6976
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
6977
#ifdef TARGET_X86_64
6978
        if (dc->code64)
6979
            disas_flags = 2;
6980
        else
6981
#endif
6982
            disas_flags = !dc->code32;
6983
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
6984
        fprintf(logfile, "\n");
6985
        if (loglevel & CPU_LOG_TB_OP_OPT) {
6986
            fprintf(logfile, "OP before opt:\n");
6987
            tcg_dump_ops(&tcg_ctx, logfile);
6988
            fprintf(logfile, "\n");
6989
        }
6990
    }
6991
#endif
6992

    
6993
    if (!search_pc)
6994
        tb->size = pc_ptr - pc_start;
6995
    return 0;
6996
}
6997

    
6998
/* Translate the code of TB into TCG intermediate ops.  PC-to-opcode
   mapping information is not recorded.  Returns 0 on success.  */
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    const int search_pc = 0;    /* no PC search tables needed */
    return gen_intermediate_code_internal(env, tb, search_pc);
}
7002

    
7003
/* Translate the code of TB into TCG intermediate ops while also filling
   the gen_opc_* side tables, so a host PC inside the generated code can
   later be mapped back to a guest PC.  Returns 0 on success.  */
int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    const int search_pc = 1;    /* record per-insn PC mapping tables */
    return gen_intermediate_code_internal(env, tb, search_pc);
}
7007

    
7008
/* Restore guest CPU state from the translation-time side tables after a
   host fault was located at opcode index pc_pos: reloads env->eip and,
   when the recorded value is statically known, env->cc_op.  searched_pc
   and puc are only used for debug logging / unused here.  */
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    int restored_cc_op;

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP) {
        int idx;

        fprintf(logfile, "RESTORE:\n");
        for (idx = 0; idx <= pc_pos; idx++) {
            if (gen_opc_instr_start[idx]) {
                fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", idx, gen_opc_pc[idx]);
            }
        }
        fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                (uint32_t)tb->cs_base);
    }
#endif
    /* gen_opc_pc holds linear addresses; eip is CS-relative.  */
    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;

    /* CC_OP_DYNAMIC means the runtime cc_op already reflects the真...
       -- correction: it means the value was not statically known at
       translation time, so the current env->cc_op must be kept.  */
    restored_cc_op = gen_opc_cc_op[pc_pos];
    if (restored_cc_op != CC_OP_DYNAMIC) {
        env->cc_op = restored_cc_op;
    }
}