Statistics
| Branch: | Revision:

root / target-i386 / translate.c @ bd7a7b33

History | View | Annotate | Download (230.5 kB)

1
/*
2
 *  i386 translation
3
 *
4
 *  Copyright (c) 2003 Fabrice Bellard
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, write to the Free Software
18
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
 */
20
#include <stdarg.h>
21
#include <stdlib.h>
22
#include <stdio.h>
23
#include <string.h>
24
#include <inttypes.h>
25
#include <signal.h>
26
#include <assert.h>
27

    
28
#include "cpu.h"
29
#include "exec-all.h"
30
#include "disas.h"
31
#include "helper.h"
32
#include "tcg-op.h"
33

    
34
/* Instruction prefix bits accumulated while decoding (stored in
   DisasContext.prefix). */
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10

#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
/* On 32-bit-only targets the 64-bit hooks compile away to nothing. */
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

//#define MACRO_TEST   1

/* global register indexes */
static TCGv cpu_env, cpu_T[2], cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst;
static TCGv cpu_T3;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
static TCGv cpu_tmp5, cpu_tmp6;

#ifdef TARGET_X86_64
/* non-zero when a REX prefix is present, selecting SPL/BPL/SIL/DIL
   instead of AH/CH/DH/BH for byte operands — assumed from use in
   gen_op_mov_reg_TN/gen_op_mov_TN_reg; TODO confirm at the decode site */
static int x86_64_hregs;
#endif
70

    
71
/* Per-instruction and per-translation-block decoder state for the
   i386 frontend. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;   /* PREFIX_* bits seen on the current insn */
    int aflag, dflag; /* address / operand size attributes */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* REX prefix index/base extension bits */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;    /* current privilege level */
    int iopl;   /* I/O privilege level */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb; /* TB currently being translated */
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    /* cached guest CPUID feature words — presumably CPUID.1 EDX/ECX and
       0x80000001 EDX; TODO confirm against where they are filled in */
    int cpuid_features;
    int cpuid_ext_features;
    int cpuid_ext2_features;
} DisasContext;
107

    
108
static void gen_eob(DisasContext *s);
109
static void gen_jmp(DisasContext *s, target_ulong eip);
110
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
111

    
112
/* i386 arith/logic operations */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7, /* explicit: OP_SHL1 shares encoding slot 6 with SHL */
};

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

enum {
    /* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
159

    
160
/* Trivial emitters: load immediates into the T0/T1/A0 pseudo registers
   or apply 16-bit masks.  Each emits exactly one TCG op into the
   current translation block. */
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

/* unsigned variant: only the C-side parameter type differs, the emitted
   op is identical */
static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

/* full-width target_ulong immediates (32 or 64 bit per target) */
static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

/* truncate T0 to 16 bits (16-bit operand size) */
static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

/* truncate the address in A0 to 16 bits (16-bit addressing mode) */
static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
226

    
227
#ifdef TARGET_X86_64

/* byte/word/long/quad operand sizes are all reachable in long mode */
#define NB_OP_SIZES 4

/* Expand one table entry per integer register (16 in long mode);
   used to build per-register generated-op dispatch tables. */
#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,\
  prefix ## R8 ## suffix,\
  prefix ## R9 ## suffix,\
  prefix ## R10 ## suffix,\
  prefix ## R11 ## suffix,\
  prefix ## R12 ## suffix,\
  prefix ## R13 ## suffix,\
  prefix ## R14 ## suffix,\
  prefix ## R15 ## suffix,

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,

#endif /* !TARGET_X86_64 */

/* Byte offsets of the 8/16/32-bit sub-registers inside a target_ulong
   CPU register slot; depends on host endianness. */
#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
278

    
279
/* Store cpu_T[t_index] into guest register 'reg' with operand size 'ot'.
   Byte stores: regs 4..7 select the legacy high-byte registers
   (AH/CH/DH/BH, REG_H_OFFSET into regs[reg - 4]) unless a REX prefix
   (x86_64_hregs) or reg >= 8 forces the byte-register encoding.
   32-bit stores also clear the high half of the 64-bit register,
   matching x86-64 zero-extension semantics. */
static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
311

    
312
/* Convenience wrappers: store T0 / T1 into register 'reg'. */
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 0, reg);
}

static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 1, reg);
}
321

    
322
/* Store the address register A0 into guest register 'reg'.
   'size' is an address-size code: 0 = 16-bit (low word only),
   1 = 32-bit (high half zeroed on x86-64), 2 = 64-bit. */
static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
347

    
348
/* Load guest register 'reg' into cpu_T[t_index].  Only the high-byte
   registers (AH/CH/DH/BH) need a special zero-extended byte load; in
   every other case the whole register slot is loaded and callers mask
   or truncate as needed. */
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}
364

    
365
/* Load the low 32 bits of register 'reg' into A0 (zero-extended). */
static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}

/* A0 += val, result truncated to 32 bits (legacy/compat addressing). */
static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
/* A0 += val with full 64-bit arithmetic (long-mode addressing). */
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

/* A0 += val using the address width implied by the code segment. */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
394

    
395
/* T0 += T1 */
static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

/* eip = T0 (used for indirect jump/call targets) */
static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}
404

    
405
/* reg += val.  'size' is an address-size code: 0 = 16-bit (only the
   low word is written back), 1 = 32-bit (truncated, which zeroes the
   high half via the full-slot store on x86-64), 2 = 64-bit. */
static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}
430

    
431
/* reg += T0, with the same size handling as gen_op_add_reg_im
   (0 = 16-bit partial write, 1 = 32-bit truncated, 2 = 64-bit). */
static inline void gen_op_add_reg_T0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}
456

    
457
/* Record which lazily-evaluated condition-code operation produced the
   current cc_src/cc_dst values. */
static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}

/* A0 += (reg << shift); result truncated to 32 bits on x86-64 for
   legacy/compat addressing (SIB scaled-index computation). */
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0) 
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
472

    
473
/* A0 = low 32 bits of segment 'reg' base. */
static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

/* A0 += segment base, truncated to 32 bits on x86-64. */
static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
/* A0 = full 64-bit segment base (only FS/GS are non-zero in long mode,
   but this loads whatever the base field holds). */
static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

/* A0 += full 64-bit segment base. */
static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

/* A0 = full 64-bit register value. */
static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

/* A0 += (reg << shift), full 64-bit arithmetic. */
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0) 
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif
512

    
513
/* CMOV dispatch table indexed by [operand size - OT_WORD][register];
   rows are word/long(/quad), entries are generated micro-ops that copy
   T1 into the register when T0 is non-zero — byte CMOV does not exist,
   hence NB_OP_SIZES - 1 rows. */
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_cmovw_, _T1_T0)
    },
    [1] = {
        DEF_REGS(gen_op_cmovl_, _T1_T0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_cmovq_, _T1_T0)
    },
#endif
};
526

    
527
/* Guest memory access helpers.  'idx' packs two fields:
   bits 1..0 = operand size (0=8, 1=16, 2=32, 3=64 bits) and
   bits 2..  = mem_index + 1 (the softmmu access-mode index). */

/* signed load into T0 (8/16/32-bit; no 64-bit signed variant needed) */
static inline void gen_op_lds_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 2:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
static inline void gen_op_ld_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

/* unsigned load into T0 — alias of gen_op_ld_T0_A0, which is already
   unsigned for every size */
static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_T0_A0(idx);
}

/* unsigned load into T1 */
static inline void gen_op_ld_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}

/* store T0 at [A0] */
static inline void gen_op_st_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

/* store T1 at [A0] */
static inline void gen_op_st_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}
629

    
630
/* Sync the guest eip to a known value (pc is already cs_base-relative
   at the call sites). */
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
635

    
636
/* Compute the source address of a string instruction into A0:
   seg:ESI, honouring segment-override prefixes and the current
   address size (64/32/16 bit). */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64 bit address: segment base only applies with an override */
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 bit address: the segment base is always added (default DS) */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}
670

    
671
/* Compute the destination address of a string instruction into A0:
   always ES:EDI — the destination segment cannot be overridden. */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64 bit address: ES base is ignored */
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
691

    
692
static inline void gen_op_movl_T0_Dshift(int ot) 
693
{
694
    tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
695
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
696
};
697

    
698
/* Zero-extend 'reg' in place from operand size 'ot'; OT_QUAD (and any
   unknown size) is a no-op since the value already fills the register. */
static void gen_extu(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8u_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16u_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32u_tl(reg, reg);
        break;
    default:
        break;
    }
}

/* Sign-extend 'reg' in place from operand size 'ot'; OT_QUAD is a no-op. */
static void gen_exts(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8s_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16s_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32s_tl(reg, reg);
        break;
    default:
        break;
    }
}
731

    
732
/* Branch to label1 if ECX != 0.  'size' is the address-size code
   (0=16, 1=32, 2=64 bit); size + 1 maps it onto OT_WORD/OT_LONG/OT_QUAD
   so only the addressing-width-relevant part of ECX is tested. */
static inline void gen_op_jnz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcond_tl(TCG_COND_NE, cpu_tmp0, tcg_const_tl(0), label1);
}

/* Branch to label1 if ECX == 0 (same masking as gen_op_jnz_ecx). */
static inline void gen_op_jz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), label1);
}

/* REPZ/REPNZ termination branches indexed by [nz][ot]: row 0 branches
   while the last SUB result is non-zero, row 1 while it is zero. */
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};
760

    
761
/* Port I/O helper dispatch, indexed by operand size (0=byte, 1=word,
   2=long). */
static void *helper_in_func[3] = {
    helper_inb,
    helper_inw,
    helper_inl,
};

static void *helper_out_func[3] = {
    helper_outb,
    helper_outw,
    helper_outl,
};

/* I/O permission check helpers, same indexing. */
static void *gen_check_io_func[3] = {
    helper_check_iob,
    helper_check_iow,
    helper_check_iol,
};
778

    
779
/* Emit the permission check for an I/O instruction of size 'ot'
   (port number is in cpu_T[0]), plus the SVM IOIO intercept check when
   active.  cc_op and eip are synced first so the helpers can raise a
   fault with consistent state; the sync is done at most once. */
static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        /* CPL > IOPL (or vm86): I/O permission bitmap must be consulted */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
                           cpu_tmp2_i32);
    }
    if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
        if (!state_saved) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(cur_eip);
            state_saved = 1;
        }
        /* fold the access size into the SVM exit information */
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           cpu_tmp2_i32,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));
    }
}
811

    
812
/* MOVS: copy one element of size 'ot' from seg:ESI to ES:EDI, then
   step ESI and EDI by +/-(1 << ot) according to DF. */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
822

    
823
/* Spill the statically-known cc_op into the cpu_cc_op global and mark
   it dynamic for the rest of the block. */
static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}

/* Lazy-flag bookkeeping: record the operands/result of the last
   flag-setting operation; flags are materialized on demand. */

/* result-only ops (inc/dec-style): cc_dst = result, cc_src unused */
static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

/* two-operand ops: cc_src = second operand, cc_dst = result */
static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

/* CMP: cc_src = T1, cc_dst = T0 - T1 (operands unchanged) */
static inline void gen_op_cmpl_T0_T1_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

/* TEST: cc_dst = T0 & T1 */
static inline void gen_op_testl_T0_T1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

/* NEG: cc_src = -result (the original operand), cc_dst = result */
static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
860

    
861
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
/* Emit the "exit when ECX == 0" prologue of a REP string op: falls
   through to the iteration body when ECX != 0, otherwise jumps to
   next_eip.  Returns the label (l2) that the loop body can branch to
   in order to terminate early. */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx(s->aflag, l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
875

    
876
/* STOS: store EAX (size 'ot') at ES:EDI, then step EDI. */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

/* LODS: load one element from seg:ESI into EAX, then step ESI. */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}

/* SCAS: compare EAX with [ES:EDI], update lazy flags, step EDI. */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

/* CMPS: compare [seg:ESI] with [ES:EDI], update lazy flags, step both. */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
915

    
916
/* INS: read a value of size 'ot' from port DX and store it at ES:EDI,
   then step EDI. */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
931

    
932
/* OUTS: load a value of size 'ot' from seg:ESI and write it to port DX,
   then step ESI. */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}
946

    
947
/* same method as Valgrind : we generate jumps to current or next
   instruction */
/* GEN_REPZ builds the REP form of a string op: exit to next_eip when
   ECX == 0, run one iteration, decrement ECX, and jump back to cur_eip
   so every iteration is its own TB (keeps the loop interruptible). */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

/* GEN_REPZ2: like GEN_REPZ but for flag-testing ops (SCAS/CMPS) — also
   exits when the ZF condition selected by 'nz' fails (REPZ vs REPNZ). */
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);\
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

/* instantiate gen_repz_movs, gen_repz_stos, ... */
GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
990

    
991
enum {
992
    JCC_O,
993
    JCC_B,
994
    JCC_Z,
995
    JCC_BE,
996
    JCC_S,
997
    JCC_P,
998
    JCC_L,
999
    JCC_LE,
1000
};
1001

    
1002
/* Fast conditional-jump generators usable when the flags were produced
   by a SUB/CMP of the given operand size.  Indexed [operand size][JCC_x].
   NULL slots (JCC_O, JCC_P) have no fast path and force the slow,
   fully-computed-flags route.  Some 64-bit variants are disabled with
   BUGGY_64 (see the macro at the top of the file). */
static GenOpFunc1 *gen_jcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_jb_subb,
        gen_op_jz_subb,
        gen_op_jbe_subb,
        gen_op_js_subb,
        NULL,
        gen_op_jl_subb,
        gen_op_jle_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_jb_subw,
        gen_op_jz_subw,
        gen_op_jbe_subw,
        gen_op_js_subw,
        NULL,
        gen_op_jl_subw,
        gen_op_jle_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_jb_subl,
        gen_op_jz_subl,
        gen_op_jbe_subl,
        gen_op_js_subl,
        NULL,
        gen_op_jl_subl,
        gen_op_jle_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        BUGGY_64(gen_op_jb_subq),
        gen_op_jz_subq,
        BUGGY_64(gen_op_jbe_subq),
        gen_op_js_subq,
        NULL,
        BUGGY_64(gen_op_jl_subq),
        BUGGY_64(gen_op_jle_subq),
    },
#endif
};
1046

    
1047
/* Slow SETcc generators: evaluate the condition from the fully computed
   eflags into T0.  Indexed by JCC_x; every condition is supported. */
static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};
1057

    
1058
/* Fast SETcc generators for flags coming from a SUB/CMP of the given
   operand size, indexed [operand size][JCC_x].  NULL slots (JCC_O,
   JCC_P) fall back to gen_setcc_slow. */
static GenOpFunc *gen_setcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        NULL,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        NULL,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        NULL,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        NULL,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,
    },
#endif
};
1102

    
1103
/* FPU helpers for the eight arithmetic ops selected by the modrm 'reg'
   field: ST0 = ST0 op FT0.  Slots 2 and 3 (FCOM/FCOMP) both use the
   compare helper; the pop for FCOMP is handled by the caller. */
static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,
};
1113

    
1114
/* Same table but operating on ST(n): ST(n) = ST(n) op ST0.
   NOTE the exception in "r" op ordering: the sub/subr and div/divr
   slots are swapped relative to the ST0 table, matching the x86
   encoding.  The compare slots are NULL (handled elsewhere). */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    NULL,
    NULL,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
};
1125

    
1126
/* Emit code computing eflags.C into 'reg' by an indirect call through
   cc_table[cc_op].compute_c, where cc_op is read at run time.  The
   address arithmetic is done in host-pointer width; the shift (3 vs 4)
   presumably matches sizeof(CCTable) on 32- vs 64-bit hosts — TODO
   confirm against the CCTable definition. */
static void gen_compute_eflags_c(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    /* 32-bit host: index cc_table with 32-bit arithmetic */
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 
                     (long)cc_table + offsetof(CCTable, compute_c));
    /* load the function pointer, then call it (pure: no side effects) */
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE, 
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    /* 64-bit host: same lookup with 64-bit arithmetic */
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64, 
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE, 
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    /* the helper returns a 32-bit value; widen it into 'reg' */
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
1147

    
1148
/* Emit code computing the complete eflags value into 'reg' (the usual
   caller passes cpu_cc_src) via cc_table[cc_op].compute_all, mirroring
   gen_compute_eflags_c above. */
static void gen_compute_eflags(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE, 
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64, 
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE, 
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
1169

    
1170
/* Emit code for an ALU group-1 operation: dest = dest op T1.
   If d == OR_TMP0 the destination is the memory operand addressed by
   A0, otherwise the register 'd'.  Updates cpu_cc_src/cpu_cc_dst and
   s1->cc_op for lazy flag evaluation. */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    /* load the destination operand into T0 */
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        /* carry-in comes from the current flags: materialize cc_op
           first, then fetch CF into tmp4 */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        /* run-time cc_op = CC_OP_ADDB + ot + (carry_in << 2); the
           <<2 looks like it selects the ADC flag variant when the
           carry-in was set — confirm against the CC_OP enum order */
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_SBBL:
        /* subtract with borrow; same carry-in handling as ADC */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        /* run-time cc_op = CC_OP_SUBB + ot + (borrow_in << 2) */
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_ADDB + ot;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    default:
        /* unknown ops are treated as AND (default deliberately falls
           into OP_ANDL) */
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_CMPL:
        /* compare: flags only, no writeback of the destination */
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    }
}
1265

    
1266
/* Emit code for INC (c > 0) or DEC (c <= 0) of one operand.
   If d == OR_TMP0 the operand is in memory at A0, otherwise register
   'd'.  INC/DEC preserve CF, so the current carry is captured into
   cpu_cc_src before the flags state is switched to CC_OP_INC/DEC. */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    /* load operand */
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    /* cc_op must be live before we read the carry below */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    /* write back */
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    /* preserve CF across the operation */
    gen_compute_eflags_c(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
1289

    
1290
/* Emit code for SHL/SHR/SAR with a variable count in T1.
   op1 == OR_TMP0 selects the memory operand at A0, otherwise a
   register.  is_right selects right shifts, is_arith selects SAR over
   SHR.  cpu_T3 receives the value shifted by count-1, presumably so
   the last bit shifted out can supply CF — TODO confirm against the
   CC_OP_SARB/SHLB flag helpers.
   XXX: add faster immediate case */
static void gen_shift_rm_T1(DisasContext *s, int ot, int op1, 
                            int is_right, int is_arith)
{
    target_ulong mask;
    int shift_label;

    /* x86 masks the count to 6 bits for 64-bit ops, 5 bits otherwise */
    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    /* count-1, used for the CF-producing partial shift */
    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);

    if (is_right) {
        if (is_arith) {
            /* SAR: sign-extend first so the shift replicates the sign */
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            /* SHR: zero-extend first */
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift; a zero count leaves the flags
       untouched, hence the run-time branch below */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    shift_label = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), shift_label);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);

    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1350

    
1351
/* Signed-direction shift helper: a non-negative arg2 shifts left by
   arg2; a negative arg2 shifts right by its magnitude. */
static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
{
    if (arg2 < 0) {
        tcg_gen_shri_tl(ret, arg1, -arg2);
    } else {
        tcg_gen_shli_tl(ret, arg1, arg2);
    }
}
1358

    
1359
/* Emit code for ROL/ROR with a variable count in T1.
   op1 == OR_TMP0 selects the memory operand at A0, otherwise a
   register.  A rotate by zero must leave value and flags untouched,
   so both the data path and the flag update are guarded by run-time
   zero-count branches.
   XXX: add faster immediate case */
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1, 
                          int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;

    /* x86 masks the count to 6 bits for 64-bit ops, 5 bits otherwise */
    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label1);

    /* reduce the count modulo the data width for sub-long sizes */
    if (ot <= OT_WORD)
        tcg_gen_andi_tl(cpu_tmp0, cpu_T[1], (1 << (3 + ot)) - 1);
    else
        tcg_gen_mov_tl(cpu_tmp0, cpu_T[1]);

    gen_extu(ot, cpu_T[0]);
    /* keep the pre-rotate value in T3 for the OF computation below */
    tcg_gen_mov_tl(cpu_T3, cpu_T[0]);

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
    if (is_right) {
        /* ROR = (x >> n) | (x << (width - n)) */
        tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    } else {
        /* ROL = (x << n) | (x >> (width - n)) */
        tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    }
    tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label2);

    /* rotates only change CF and OF; start from the full eflags with
       those two bits cleared */
    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    /* OF is derived from old-value XOR new-value, aligned to bit 11 */
    tcg_gen_xor_tl(cpu_tmp0, cpu_T3, cpu_T[0]);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    /* CF: low bit of the result for ROL, high bit for ROR */
    if (is_right) {
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], data_bits - 1);
    }
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);

    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1438

    
1439
/* Rotate-through-carry helpers, indexed ot + (is_right * 4):
   entries 0-3 are RCL for byte/word/long/quad, entries 4-7 are RCR.
   The quad variants only exist on x86_64 (NULL otherwise). */
static void *helper_rotc[8] = {
    helper_rclb,
    helper_rclw,
    helper_rcll,
    X86_64_ONLY(helper_rclq),
    helper_rcrb,
    helper_rcrw,
    helper_rcrl,
    X86_64_ONLY(helper_rcrq),
};
1449

    
1450
/* Emit code for RCL/RCR (rotate through carry) with the count in T1,
   using the helper_rotc table.  op1 == OR_TMP0 selects the memory
   operand at A0.  After the call, cpu_T3 appears to hold either the
   new flags or -1 when the count was zero and the flags must not be
   touched — NOTE(review): confirm against the helper_rcl*/rcr*
   implementations.
   XXX: add faster immediate = 1 case */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1, 
                           int is_right)
{
    int label1;

    /* the helper reads the live flags, so flush cc_op first */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
                       cpu_T[0], cpu_T[0], cpu_T[1]);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags only if T3 != -1 (i.e. the rotate happened) */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(-1), label1);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label1);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1484

    
1485
/* Emit code for SHLD/SHRD (double-precision shift): the destination in
   T0 is shifted with bits pulled in from T1, count in T3.
   op1 == OR_TMP0 selects the memory operand at A0.  A zero count must
   leave value and flags untouched, hence the two run-time branches.
   XXX: add faster immediate case */
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1, 
                                int is_right)
{
    int label1, label2, data_bits;
    target_ulong mask;

    /* x86 masks the count to 6 bits for 64-bit ops, 5 bits otherwise */
    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);

    /* count-1, used to capture the last bit shifted out (CF) */
    tcg_gen_addi_tl(cpu_tmp5, cpu_T3, -1);
    if (ot == OT_WORD) {
        /* Note: we implement the Intel behaviour for shift count > 16 */
        if (is_right) {
            /* build a 32-bit value with T1 above T0, then shift it */
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
            tcg_gen_shli_tl(cpu_tmp0, cpu_T[1], 16);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);

            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);

            /* only needed if count > 16, but a test would complicate */
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
            tcg_gen_shl_tl(cpu_tmp0, cpu_T[0], cpu_tmp5);

            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);

            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        } else {
            /* XXX: not optimal */
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
            tcg_gen_shli_tl(cpu_T[1], cpu_T[1], 16);
            tcg_gen_or_tl(cpu_T[1], cpu_T[1], cpu_T[0]);
            tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);

            tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
            tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
            tcg_gen_shr_tl(cpu_tmp6, cpu_T[1], cpu_tmp0);
            tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);

            tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
            tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        data_bits = 8 << ot;
        if (is_right) {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);

            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);

            /* result = (T0 >> count) | (T1 << (width - count)) */
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
            tcg_gen_shl_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);

        } else {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);

            tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);

            /* result = (T0 << count) | (T1 >> (width - count)) */
            tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
            tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    }
    /* T1 keeps the count-1-shifted value for the flag update */
    tcg_gen_mov_tl(cpu_T[1], cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label2);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
    if (is_right) {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    } else {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
    }
    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1595

    
1596
/* Dispatch a shift/rotate group-2 operation.  The count is taken from
   register 's' (loaded into T1) unless s == OR_TMP1, in which case T1
   already holds it.  'd' is the destination (OR_TMP0 for memory). */
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);

    switch(op) {
    /* plain shifts */
    case OP_SHL:
    case OP_SHL1:
        gen_shift_rm_T1(s1, ot, d, 0, 0);
        break;
    case OP_SHR:
        gen_shift_rm_T1(s1, ot, d, 1, 0);
        break;
    case OP_SAR:
        gen_shift_rm_T1(s1, ot, d, 1, 1);
        break;
    /* rotates */
    case OP_ROL:
        gen_rot_rm_T1(s1, ot, d, 0);
        break;
    case OP_ROR:
        gen_rot_rm_T1(s1, ot, d, 1);
        break;
    /* rotates through carry */
    case OP_RCL:
        gen_rotc_rm_T1(s1, ot, d, 0);
        break;
    case OP_RCR:
        gen_rotc_rm_T1(s1, ot, d, 1);
        break;
    }
}
1625

    
1626
/* Shift/rotate with an immediate count: load the count into T1 and
   reuse the variable-count path. */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
1632

    
1633
/* Decode the memory form of a modrm byte and emit code computing the
   effective address into A0 (including segment base when needed).
   Consumes any SIB and displacement bytes from the instruction stream
   (advancing s->pc).  On return *reg_ptr is always OR_A0 and
   *offset_ptr is 0; callers access the operand through A0. */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    /* a segment-override prefix forces the segment-base addition */
    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        /* base == 4 escapes to a SIB byte */
        if (base == 4) {
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base register: disp32 only; in 64-bit mode without
                   a SIB byte this is RIP-relative addressing */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0) {
                    gen_op_addq_A0_im(disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            /* default segment: SS when based on EBP/ESP, else DS */
            if (override < 0) {
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        /* 16-bit addressing */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* disp16 only, no base register */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        /* the eight 16-bit base/index register combinations */
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        /* 16-bit effective addresses wrap at 64K */
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            /* BP-based modes default to SS */
            if (override < 0) {
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
1816

    
1817
/* Skip the memory-operand bytes of a modrm form without generating any
   code (used for multi-byte NOP-style encodings): consume the SIB byte
   and displacement, advancing s->pc past them. */
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
    if (mod == 3)
        return;                 /* register form: nothing to skip */
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing: an SIB byte follows when base == 4 */
        base = rm;
        if (base == 4) {
            code = ldub_code(s->pc++);
            base = code & 7;
        }
        /* displacement: disp8 for mod 1, disp32 for mod 2 and for the
           mod 0 / base 5 absolute form */
        if (mod == 1) {
            s->pc += 1;
        } else if (mod == 2 || base == 5) {
            s->pc += 4;
        }
    } else {
        /* 16-bit addressing: disp8 for mod 1, disp16 for mod 2 and for
           the mod 0 / rm 6 absolute form */
        if (mod == 1) {
            s->pc += 1;
        } else if (mod == 2 || rm == 6) {
            s->pc += 2;
        }
    }
}
1866

    
1867
/* used for LEA and MOV AX, mem */
1868
static void gen_add_A0_ds_seg(DisasContext *s)
1869
{
1870
    int override, must_add_seg;
1871
    must_add_seg = s->addseg;
1872
    override = R_DS;
1873
    if (s->override >= 0) {
1874
        override = s->override;
1875
        must_add_seg = 1;
1876
    } else {
1877
        override = R_DS;
1878
    }
1879
    if (must_add_seg) {
1880
#ifdef TARGET_X86_64
1881
        if (CODE64(s)) {
1882
            gen_op_addq_A0_seg(override);
1883
        } else
1884
#endif
1885
        {
1886
            gen_op_addl_A0_seg(override);
1887
        }
1888
    }
1889
}
1890

    
1891
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
   OR_TMP0.  For the register form (mod == 3) this is a reg-to-reg move;
   for the memory form the effective address is computed first and the
   value is loaded from / stored to A0. */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        /* register operand */
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    } else {
        /* memory operand: address into A0, then load/store via T0 */
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_st_T0_A0(ot + s->mem_index);
        } else {
            gen_op_ld_T0_A0(ot + s->mem_index);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    }
}
1922

    
1923
/* Fetch an immediate of the given operand size from the instruction
   stream and advance s->pc past it.  Sizes other than byte/word read
   32 bits (quad immediates do not occur here). */
static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t val;

    if (ot == OT_BYTE) {
        val = ldub_code(s->pc);
        s->pc += 1;
    } else if (ot == OT_WORD) {
        val = lduw_code(s->pc);
        s->pc += 2;
    } else {
        /* OT_LONG and anything larger */
        val = ldl_code(s->pc);
        s->pc += 4;
    }
    return val;
}
1944

    
1945
static inline int insn_const_size(unsigned int ot)
1946
{
1947
    if (ot <= OT_LONG)
1948
        return 1 << ot;
1949
    else
1950
        return 4;
1951
}
1952

    
1953
/* Emit a jump to 'eip': chain directly to the next TB when the target
   lies on one of the pages this TB already covers, otherwise end the
   block and return to the main loop. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
1973

    
1974
/* Emit a conditional jump for condition 'b' (low bit = inversion,
   bits 1..3 = jcc_op).  'val' is the taken target, 'next_eip' the
   fall-through.  When TB chaining is allowed (s->jmp_opt) a fast
   per-cc_op jump helper is used if one exists for the pending flags
   state; otherwise the condition is materialized in T0 first. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            switch(jcc_op) {
            case JCC_Z:
            case JCC_S:
                /* only Z and S can be tested directly from cc_dst
                   after these operations; the size index is the
                   cc_op offset modulo the 4 operand sizes */
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            /* slow path: compute the condition into T0, then test it */
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}

/* Compute the setcc condition 'b' into T0 (0 or 1), using a fast
   per-cc_op helper when the pending flags state allows it, and the
   generic slow helper otherwise.  The low bit of 'b' inverts the
   result. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        switch(jcc_op) {
        case JCC_Z:
        case JCC_S:
            /* only Z and S are directly available after these ops */
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}

static inline void gen_op_movl_T0_seg(int seg_reg)
2171
{
2172
    tcg_gen_ld32u_tl(cpu_T[0], cpu_env, 
2173
                     offsetof(CPUX86State,segs[seg_reg].selector));
2174
}
2175

    
2176
static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2177
{
2178
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2179
    tcg_gen_st32_tl(cpu_T[0], cpu_env, 
2180
                    offsetof(CPUX86State,segs[seg_reg].selector));
2181
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2182
    tcg_gen_st_tl(cpu_T[0], cpu_env, 
2183
                  offsetof(CPUX86State,segs[seg_reg].base));
2184
}
2185

    
2186
/* move T0 to seg_reg and compute if the CPU state may change. Never
2187
   call this function with seg_reg == R_CS */
2188
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2189
{
2190
    if (s->pe && !s->vm86) {
2191
        /* XXX: optimize by finding processor state dynamically */
2192
        if (s->cc_op != CC_OP_DYNAMIC)
2193
            gen_op_set_cc_op(s->cc_op);
2194
        gen_jmp_im(cur_eip);
2195
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2196
        tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
2197
        /* abort translation because the addseg value may change or
2198
           because ss32 may change. For R_SS, translation must always
2199
           stop as a special handling must be done to disable hardware
2200
           interrupts for the next instruction */
2201
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
2202
            s->is_jmp = 3;
2203
    } else {
2204
        gen_op_movl_seg_T0_vm(seg_reg);
2205
        if (seg_reg == R_SS)
2206
            s->is_jmp = 3;
2207
    }
2208
}
2209

    
2210
static inline int svm_is_rep(int prefixes)
2211
{
2212
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2213
}
2214

    
2215
static inline int
2216
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2217
                              uint32_t type, uint64_t param)
2218
{
2219
    if(!(s->flags & (INTERCEPT_SVM_MASK)))
2220
        /* no SVM activated */
2221
        return 0;
2222
    switch(type) {
2223
        /* CRx and DRx reads/writes */
2224
        case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
2225
            if (s->cc_op != CC_OP_DYNAMIC) {
2226
                gen_op_set_cc_op(s->cc_op);
2227
            }
2228
            gen_jmp_im(pc_start - s->cs_base);
2229
            tcg_gen_helper_0_2(helper_svm_check_intercept_param, 
2230
                               tcg_const_i32(type), tcg_const_i64(param));
2231
            /* this is a special case as we do not know if the interception occurs
2232
               so we assume there was none */
2233
            return 0;
2234
        case SVM_EXIT_MSR:
2235
            if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
2236
                if (s->cc_op != CC_OP_DYNAMIC) {
2237
                    gen_op_set_cc_op(s->cc_op);
2238
                }
2239
                gen_jmp_im(pc_start - s->cs_base);
2240
                tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2241
                                   tcg_const_i32(type), tcg_const_i64(param));
2242
                /* this is a special case as we do not know if the interception occurs
2243
                   so we assume there was none */
2244
                return 0;
2245
            }
2246
            break;
2247
        default:
2248
            if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
2249
                if (s->cc_op != CC_OP_DYNAMIC) {
2250
                    gen_op_set_cc_op(s->cc_op);
2251
                }
2252
                gen_jmp_im(pc_start - s->cs_base);
2253
                tcg_gen_helper_0_2(helper_vmexit,
2254
                                   tcg_const_i32(type), tcg_const_i64(param));
2255
                /* we can optimize this one so TBs don't get longer
2256
                   than up to vmexit */
2257
                gen_eob(s);
2258
                return 1;
2259
            }
2260
    }
2261
    return 0;
2262
}
2263

    
2264
static inline int
2265
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2266
{
2267
    return gen_svm_check_intercept_param(s, pc_start, type, 0);
2268
}
2269

    
2270
/* Add 'addend' to ESP, using the stack pointer width implied by the
   current mode (64 bit, 32 bit ss32, or 16 bit). */
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_add_reg_im(2, R_ESP, addend);
    } else
#endif
    if (s->ss32) {
        gen_op_add_reg_im(1, R_ESP, addend);
    } else {
        gen_op_add_reg_im(0, R_ESP, addend);
    }
}

/* generate a push. It depends on ss32, addseg and dflag */
2285
static void gen_push_T0(DisasContext *s)
2286
{
2287
#ifdef TARGET_X86_64
2288
    if (CODE64(s)) {
2289
        gen_op_movq_A0_reg(R_ESP);
2290
        if (s->dflag) {
2291
            gen_op_addq_A0_im(-8);
2292
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2293
        } else {
2294
            gen_op_addq_A0_im(-2);
2295
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
2296
        }
2297
        gen_op_mov_reg_A0(2, R_ESP);
2298
    } else
2299
#endif
2300
    {
2301
        gen_op_movl_A0_reg(R_ESP);
2302
        if (!s->dflag)
2303
            gen_op_addl_A0_im(-2);
2304
        else
2305
            gen_op_addl_A0_im(-4);
2306
        if (s->ss32) {
2307
            if (s->addseg) {
2308
                tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2309
                gen_op_addl_A0_seg(R_SS);
2310
            }
2311
        } else {
2312
            gen_op_andl_A0_ffff();
2313
            tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2314
            gen_op_addl_A0_seg(R_SS);
2315
        }
2316
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2317
        if (s->ss32 && !s->addseg)
2318
            gen_op_mov_reg_A0(1, R_ESP);
2319
        else
2320
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2321
    }
2322
}
2323

    
2324
/* generate a push. It depends on ss32, addseg and dflag */
2325
/* slower version for T1, only used for call Ev */
2326
static void gen_push_T1(DisasContext *s)
2327
{
2328
#ifdef TARGET_X86_64
2329
    if (CODE64(s)) {
2330
        gen_op_movq_A0_reg(R_ESP);
2331
        if (s->dflag) {
2332
            gen_op_addq_A0_im(-8);
2333
            gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2334
        } else {
2335
            gen_op_addq_A0_im(-2);
2336
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
2337
        }
2338
        gen_op_mov_reg_A0(2, R_ESP);
2339
    } else
2340
#endif
2341
    {
2342
        gen_op_movl_A0_reg(R_ESP);
2343
        if (!s->dflag)
2344
            gen_op_addl_A0_im(-2);
2345
        else
2346
            gen_op_addl_A0_im(-4);
2347
        if (s->ss32) {
2348
            if (s->addseg) {
2349
                gen_op_addl_A0_seg(R_SS);
2350
            }
2351
        } else {
2352
            gen_op_andl_A0_ffff();
2353
            gen_op_addl_A0_seg(R_SS);
2354
        }
2355
        gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2356

    
2357
        if (s->ss32 && !s->addseg)
2358
            gen_op_mov_reg_A0(1, R_ESP);
2359
        else
2360
            gen_stack_update(s, (-2) << s->dflag);
2361
    }
2362
}
2363

    
2364
/* two step pop is necessary for precise exceptions */
2365
static void gen_pop_T0(DisasContext *s)
2366
{
2367
#ifdef TARGET_X86_64
2368
    if (CODE64(s)) {
2369
        gen_op_movq_A0_reg(R_ESP);
2370
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2371
    } else
2372
#endif
2373
    {
2374
        gen_op_movl_A0_reg(R_ESP);
2375
        if (s->ss32) {
2376
            if (s->addseg)
2377
                gen_op_addl_A0_seg(R_SS);
2378
        } else {
2379
            gen_op_andl_A0_ffff();
2380
            gen_op_addl_A0_seg(R_SS);
2381
        }
2382
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2383
    }
2384
}
2385

    
2386
/* Second half of a pop: advance ESP by the operand size (8 for a
   64 bit pop, otherwise 2 or 4 depending on dflag). */
static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}

/* Load the current stack address into A0 (SS-based if addseg), and
   keep the raw (unsegmented) ESP value in T1. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}

/* NOTE: wrap around in 16 bit not fully handled */
2409
static void gen_pusha(DisasContext *s)
2410
{
2411
    int i;
2412
    gen_op_movl_A0_reg(R_ESP);
2413
    gen_op_addl_A0_im(-16 <<  s->dflag);
2414
    if (!s->ss32)
2415
        gen_op_andl_A0_ffff();
2416
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2417
    if (s->addseg)
2418
        gen_op_addl_A0_seg(R_SS);
2419
    for(i = 0;i < 8; i++) {
2420
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2421
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2422
        gen_op_addl_A0_im(2 <<  s->dflag);
2423
    }
2424
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2425
}
2426

    
2427
/* NOTE: wrap around in 16 bit not fully handled */
2428
static void gen_popa(DisasContext *s)
2429
{
2430
    int i;
2431
    gen_op_movl_A0_reg(R_ESP);
2432
    if (!s->ss32)
2433
        gen_op_andl_A0_ffff();
2434
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2435
    tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 <<  s->dflag);
2436
    if (s->addseg)
2437
        gen_op_addl_A0_seg(R_SS);
2438
    for(i = 0;i < 8; i++) {
2439
        /* ESP is not reloaded */
2440
        if (i != 3) {
2441
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2442
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2443
        }
2444
        gen_op_addl_A0_im(2 <<  s->dflag);
2445
    }
2446
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2447
}
2448

    
2449
/* Emit the ENTER instruction: push EBP, optionally copy 'level'
   frame pointers via a helper, set EBP to the new frame and reserve
   esp_addend bytes of locals. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        /* NOTE(review): gen_op_movl_A0_reg followed by addq looks
           inconsistent with the pure 64 bit paths elsewhere — confirm
           A0 is still handled as a full 64 bit value here */
        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter64_level,
                               tcg_const_i32(level),
                               tcg_const_i32((ot == OT_QUAD)),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter_level,
                               tcg_const_i32(level),
                               tcg_const_i32(s->dflag),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}

/* Raise exception 'trapno' at guest eip 'cur_eip': flags and eip are
   synchronized first so the exception is precise, then translation of
   this block is stopped. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
    s->is_jmp = 3;
}

/* an interrupt is different from an exception because of the
2515
   privilege checks */
2516
static void gen_interrupt(DisasContext *s, int intno,
2517
                          target_ulong cur_eip, target_ulong next_eip)
2518
{
2519
    if (s->cc_op != CC_OP_DYNAMIC)
2520
        gen_op_set_cc_op(s->cc_op);
2521
    gen_jmp_im(cur_eip);
2522
    tcg_gen_helper_0_2(helper_raise_interrupt, 
2523
                       tcg_const_i32(intno), 
2524
                       tcg_const_i32(next_eip - cur_eip));
2525
    s->is_jmp = 3;
2526
}
2527

    
2528
/* Stop translation and enter the debugger at guest eip 'cur_eip'. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_0(helper_debug);
    s->is_jmp = 3;
}

/* generate a generic end of block. Trace exception is also generated
2538
   if needed */
2539
static void gen_eob(DisasContext *s)
2540
{
2541
    if (s->cc_op != CC_OP_DYNAMIC)
2542
        gen_op_set_cc_op(s->cc_op);
2543
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
2544
        tcg_gen_helper_0_0(helper_reset_inhibit_irq);
2545
    }
2546
    if (s->singlestep_enabled) {
2547
        tcg_gen_helper_0_0(helper_debug);
2548
    } else if (s->tf) {
2549
        tcg_gen_helper_0_0(helper_single_step);
2550
    } else {
2551
        tcg_gen_exit_tb(0);
2552
    }
2553
    s->is_jmp = 3;
2554
}
2555

    
2556
/* generate a jump to eip. No segment change must happen before as a
2557
   direct call to the next block may occur */
2558
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2559
{
2560
    if (s->jmp_opt) {
2561
        if (s->cc_op != CC_OP_DYNAMIC) {
2562
            gen_op_set_cc_op(s->cc_op);
2563
            s->cc_op = CC_OP_DYNAMIC;
2564
        }
2565
        gen_goto_tb(s, tb_num, eip);
2566
        s->is_jmp = 3;
2567
    } else {
2568
        gen_jmp_im(eip);
2569
        gen_eob(s);
2570
    }
2571
}
2572

    
2573
/* Unconditional jump to 'eip' using TB slot 0. */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}

static inline void gen_ldq_env_A0(int idx, int offset)
2579
{
2580
    int mem_index = (idx >> 2) - 1;
2581
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2582
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
2583
}
2584

    
2585
static inline void gen_stq_env_A0(int idx, int offset)
2586
{
2587
    int mem_index = (idx >> 2) - 1;
2588
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
2589
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
2590
}
2591

    
2592
static inline void gen_ldo_env_A0(int idx, int offset)
2593
{
2594
    int mem_index = (idx >> 2) - 1;
2595
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2596
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2597
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2598
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
2599
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2600
}
2601

    
2602
static inline void gen_sto_env_A0(int idx, int offset)
2603
{
2604
    int mem_index = (idx >> 2) - 1;
2605
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2606
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
2607
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2608
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2609
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
2610
}
2611

    
2612
static inline void gen_op_movo(int d_offset, int s_offset)
2613
{
2614
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2615
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2616
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
2617
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
2618
}
2619

    
2620
static inline void gen_op_movq(int d_offset, int s_offset)
2621
{
2622
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2623
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2624
}
2625

    
2626
static inline void gen_op_movl(int d_offset, int s_offset)
2627
{
2628
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
2629
    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
2630
}
2631

    
2632
static inline void gen_op_movq_env_0(int d_offset)
2633
{
2634
    tcg_gen_movi_i64(cpu_tmp1_i64, 0);
2635
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2636
}
2637

    
2638
#define SSE_SPECIAL ((void *)1)
2639
#define SSE_DUMMY ((void *)2)
2640

    
2641
#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
2642
#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
2643
                     helper_ ## x ## ss, helper_ ## x ## sd, }
2644

    
2645
static void *sse_op_table1[256][4] = {
2646
    /* 3DNow! extensions */
2647
    [0x0e] = { SSE_DUMMY }, /* femms */
2648
    [0x0f] = { SSE_DUMMY }, /* pf... */
2649
    /* pure SSE operations */
2650
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2651
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2652
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
2653
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
2654
    [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
2655
    [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
2656
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
2657
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */
2658

    
2659
    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
2660
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
2661
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2662
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
2663
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2664
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2665
    [0x2e] = { helper_ucomiss, helper_ucomisd },
2666
    [0x2f] = { helper_comiss, helper_comisd },
2667
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2668
    [0x51] = SSE_FOP(sqrt),
2669
    [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
2670
    [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
2671
    [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
2672
    [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
2673
    [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
2674
    [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
2675
    [0x58] = SSE_FOP(add),
2676
    [0x59] = SSE_FOP(mul),
2677
    [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
2678
               helper_cvtss2sd, helper_cvtsd2ss },
2679
    [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
2680
    [0x5c] = SSE_FOP(sub),
2681
    [0x5d] = SSE_FOP(min),
2682
    [0x5e] = SSE_FOP(div),
2683
    [0x5f] = SSE_FOP(max),
2684

    
2685
    [0xc2] = SSE_FOP(cmpeq),
2686
    [0xc6] = { helper_shufps, helper_shufpd },
2687

    
2688
    /* MMX ops and their SSE extensions */
2689
    [0x60] = MMX_OP2(punpcklbw),
2690
    [0x61] = MMX_OP2(punpcklwd),
2691
    [0x62] = MMX_OP2(punpckldq),
2692
    [0x63] = MMX_OP2(packsswb),
2693
    [0x64] = MMX_OP2(pcmpgtb),
2694
    [0x65] = MMX_OP2(pcmpgtw),
2695
    [0x66] = MMX_OP2(pcmpgtl),
2696
    [0x67] = MMX_OP2(packuswb),
2697
    [0x68] = MMX_OP2(punpckhbw),
2698
    [0x69] = MMX_OP2(punpckhwd),
2699
    [0x6a] = MMX_OP2(punpckhdq),
2700
    [0x6b] = MMX_OP2(packssdw),
2701
    [0x6c] = { NULL, helper_punpcklqdq_xmm },
2702
    [0x6d] = { NULL, helper_punpckhqdq_xmm },
2703
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2704
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movqdu */
2705
    [0x70] = { helper_pshufw_mmx,
2706
               helper_pshufd_xmm,
2707
               helper_pshufhw_xmm,
2708
               helper_pshuflw_xmm },
2709
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2710
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2711
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2712
    [0x74] = MMX_OP2(pcmpeqb),
2713
    [0x75] = MMX_OP2(pcmpeqw),
2714
    [0x76] = MMX_OP2(pcmpeql),
2715
    [0x77] = { SSE_DUMMY }, /* emms */
2716
    [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
2717
    [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
2718
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
2719
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2720
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
2721
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
2722
    [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
2723
    [0xd1] = MMX_OP2(psrlw),
2724
    [0xd2] = MMX_OP2(psrld),
2725
    [0xd3] = MMX_OP2(psrlq),
2726
    [0xd4] = MMX_OP2(paddq),
2727
    [0xd5] = MMX_OP2(pmullw),
2728
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
2729
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
2730
    [0xd8] = MMX_OP2(psubusb),
2731
    [0xd9] = MMX_OP2(psubusw),
2732
    [0xda] = MMX_OP2(pminub),
2733
    [0xdb] = MMX_OP2(pand),
2734
    [0xdc] = MMX_OP2(paddusb),
2735
    [0xdd] = MMX_OP2(paddusw),
2736
    [0xde] = MMX_OP2(pmaxub),
2737
    [0xdf] = MMX_OP2(pandn),
2738
    [0xe0] = MMX_OP2(pavgb),
2739
    [0xe1] = MMX_OP2(psraw),
2740
    [0xe2] = MMX_OP2(psrad),
2741
    [0xe3] = MMX_OP2(pavgw),
2742
    [0xe4] = MMX_OP2(pmulhuw),
2743
    [0xe5] = MMX_OP2(pmulhw),
2744
    [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
2745
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntq */
2746
    [0xe8] = MMX_OP2(psubsb),
2747
    [0xe9] = MMX_OP2(psubsw),
2748
    [0xea] = MMX_OP2(pminsw),
2749
    [0xeb] = MMX_OP2(por),
2750
    [0xec] = MMX_OP2(paddsb),
2751
    [0xed] = MMX_OP2(paddsw),
2752
    [0xee] = MMX_OP2(pmaxsw),
2753
    [0xef] = MMX_OP2(pxor),
2754
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
2755
    [0xf1] = MMX_OP2(psllw),
2756
    [0xf2] = MMX_OP2(pslld),
2757
    [0xf3] = MMX_OP2(psllq),
2758
    [0xf4] = MMX_OP2(pmuludq),
2759
    [0xf5] = MMX_OP2(pmaddwd),
2760
    [0xf6] = MMX_OP2(psadbw),
2761
    [0xf7] = MMX_OP2(maskmov),
2762
    [0xf8] = MMX_OP2(psubb),
2763
    [0xf9] = MMX_OP2(psubw),
2764
    [0xfa] = MMX_OP2(psubl),
2765
    [0xfb] = MMX_OP2(psubq),
2766
    [0xfc] = MMX_OP2(paddb),
2767
    [0xfd] = MMX_OP2(paddw),
2768
    [0xfe] = MMX_OP2(paddl),
2769
};
2770

    
2771
static void *sse_op_table2[3 * 8][2] = {
2772
    [0 + 2] = MMX_OP2(psrlw),
2773
    [0 + 4] = MMX_OP2(psraw),
2774
    [0 + 6] = MMX_OP2(psllw),
2775
    [8 + 2] = MMX_OP2(psrld),
2776
    [8 + 4] = MMX_OP2(psrad),
2777
    [8 + 6] = MMX_OP2(pslld),
2778
    [16 + 2] = MMX_OP2(psrlq),
2779
    [16 + 3] = { NULL, helper_psrldq_xmm },
2780
    [16 + 6] = MMX_OP2(psllq),
2781
    [16 + 7] = { NULL, helper_pslldq_xmm },
2782
};
2783

    
2784
static void *sse_op_table3[4 * 3] = {
2785
    helper_cvtsi2ss,
2786
    helper_cvtsi2sd,
2787
    X86_64_ONLY(helper_cvtsq2ss),
2788
    X86_64_ONLY(helper_cvtsq2sd),
2789

    
2790
    helper_cvttss2si,
2791
    helper_cvttsd2si,
2792
    X86_64_ONLY(helper_cvttss2sq),
2793
    X86_64_ONLY(helper_cvttsd2sq),
2794

    
2795
    helper_cvtss2si,
2796
    helper_cvtsd2si,
2797
    X86_64_ONLY(helper_cvtss2sq),
2798
    X86_64_ONLY(helper_cvtsd2sq),
2799
};
2800

    
2801
static void *sse_op_table4[8][4] = {
2802
    SSE_FOP(cmpeq),
2803
    SSE_FOP(cmplt),
2804
    SSE_FOP(cmple),
2805
    SSE_FOP(cmpunord),
2806
    SSE_FOP(cmpneq),
2807
    SSE_FOP(cmpnlt),
2808
    SSE_FOP(cmpnle),
2809
    SSE_FOP(cmpord),
2810
};
2811

    
2812
/* 3DNow! helpers, indexed by the opcode suffix byte. */
static void *sse_op_table5[256] = {
    [0x0c] = helper_pi2fw,
    [0x0d] = helper_pi2fd,
    [0x1c] = helper_pf2iw,
    [0x1d] = helper_pf2id,
    [0x8a] = helper_pfnacc,
    [0x8e] = helper_pfpnacc,
    [0x90] = helper_pfcmpge,
    [0x94] = helper_pfmin,
    [0x96] = helper_pfrcp,
    [0x97] = helper_pfrsqrt,
    [0x9a] = helper_pfsub,
    [0x9e] = helper_pfadd,
    [0xa0] = helper_pfcmpgt,
    [0xa4] = helper_pfmax,
    [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = helper_movq, /* pfrsqit1 */
    [0xaa] = helper_pfsubr,
    [0xae] = helper_pfacc,
    [0xb0] = helper_pfcmpeq,
    [0xb4] = helper_pfmul,
    [0xb6] = helper_movq, /* pfrcpit2 */
    [0xb7] = helper_pmulhrw_mmx,
    [0xbb] = helper_pswapd,
    [0xbf] = helper_pavgb_mmx /* pavgusb */
};

/* Translate one MMX/SSE/SSE2/SSE3/3DNow! instruction into TCG ops.
   'b' is the opcode byte following 0x0f; 'b1' derived below encodes the
   mandatory prefix (0: none, 1: 0x66, 2: 0xf3, 3: 0xf2) and selects the
   helper from sse_op_table1.  Raises #NM for CR0.TS, #UD for CR0.EM,
   missing OSFXSR (XMM forms) or missing 3DNow! CPUID bit.  SSE_SPECIAL
   entries (mostly moves) are decoded case-by-case; everything else goes
   through the generic two-operand helper path at the end. */
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    void *sse_op2;

    /* map the mandatory prefix to the table column */
    b &= 0xff;
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    /* 0x10-0x5f, 0xc2 and 0xc6 operate on XMM regardless of prefix */
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    illegal_op:
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        goto illegal_op;
    if (b == 0x0e) {
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
            goto illegal_op;
        /* femms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    if (b == 0x77) {
        /* emms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        tcg_gen_helper_0_0(helper_enter_mmx);
    }

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    if (is_xmm)
        reg |= rex_r;
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        /* fold the prefix selector into the case value: 0xNbb */
        b |= (b1 << 8);
        switch(b) {
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntps */
        case 0x3f0: /* lddqu */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx));
                tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
            }
            break;
        case 0x16e: /* movd xmm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
            }
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                /* memory form zeroes the upper three dwords */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                /* memory form zeroes the upper qword */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            /* duplicate even dwords into the odd slots */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            /* duplicate odd dwords into the even slots */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
        case 0x7e: /* movd ea, mm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x17e: /* movd ea, xmm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x27e: /* movq xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x7f: /* movq ea, mm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                            offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x011: /* movups */
        case 0x111: /* movupd */
        case 0x029: /* movaps */
        case 0x129: /* movapd */
        case 0x17f: /* movdqa ea, xmm */
        case 0x27f: /* movdqu ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                            offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x211: /* movss ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            }
            break;
        case 0x311: /* movsd ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            }
            break;
        case 0x013: /* movlps */
        case 0x113: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x017: /* movhps */
        case 0x117: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x71: /* shift mm, im */
        case 0x72:
        case 0x73:
        case 0x171: /* shift xmm, im */
        case 0x172:
        case 0x173:
            /* shift count comes as an immediate; stash it in the scratch
               mmx/xmm temporary so the two-operand helpers can be reused */
            val = ldub_code(s->pc++);
            if (is_xmm) {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
                op1_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
                op1_offset = offsetof(CPUX86State,mmx_t0);
            }
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
            if (!sse_op2)
                goto illegal_op;
            if (is_xmm) {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0x050: /* movmskps */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x150: /* movmskpd */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x02a: /* cvtpi2ps */
        case 0x12a: /* cvtpi2pd */
            tcg_gen_helper_0_0(helper_enter_mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b >> 8) {
            case 0x0:
                tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
                break;
            default:
            case 0x1:
                tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22a: /* cvtsi2ss */
        case 0x32a: /* cvtsi2sd */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
            tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
            break;
        case 0x02c: /* cvttps2pi */
        case 0x12c: /* cvttpd2pi */
        case 0x02d: /* cvtps2pi */
        case 0x12d: /* cvtpd2pi */
            tcg_gen_helper_0_0(helper_enter_mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                gen_ldo_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b) {
            case 0x02c:
                tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x12c:
                tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x02d:
                tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x12d:
                tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22c: /* cvttss2si */
        case 0x32c: /* cvttsd2si */
        case 0x22d: /* cvtss2si */
        case 0x32d: /* cvtsd2si */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if ((b >> 8) & 1) {
                    gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
                } else {
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                }
                op2_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                                    (b & 1) * 4];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            if (ot == OT_LONG) {
                tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            } else {
                tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
            }
            gen_op_mov_reg_T0(ot, reg);
            break;
        case 0xc4: /* pinsrw */
        case 0x1c4:
            s->rip_offset = 1;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
            } else {
                val &= 3;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
            }
            break;
        case 0xc5: /* pextrw */
        case 0x1c5:
            if (mod != 3)
                goto illegal_op;
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
            } else {
                val &= 3;
                rm = (modrm & 7);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x1d6: /* movq ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x2d6: /* movq2dq */
            tcg_gen_helper_0_0(helper_enter_mmx);
            rm = (modrm & 7);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,fpregs[rm].mmx));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x3d6: /* movdq2q */
            tcg_gen_helper_0_0(helper_enter_mmx);
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            break;
        case 0xd7: /* pmovmskb */
        case 0x1d7:
            if (mod != 3)
                goto illegal_op;
            if (b1) {
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
                tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
            } else {
                rm = (modrm & 7);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
            }
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        default:
            goto illegal_op;
        }
    } else {
        /* generic MMX or SSE operation */
        switch(b) {
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
        case 0xc2: /* compare insns */
            /* these carry a trailing immediate; record it so rip-relative
               addressing is computed past it */
            s->rip_offset = 1;
            break;
        default:
            break;
        }
        if (is_xmm) {
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
                                b == 0xc2)) {
                    /* specific case for SSE single instructions */
                    if (b1 == 2) {
                        /* 32 bit access */
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                    } else {
                        /* 64 bit access */
                        gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
                    }
                } else {
                    gen_ldo_env_A0(s->mem_index, op2_offset);
                }
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
        } else {
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
        }
        switch(b) {
        case 0x0f: /* 3DNow! data insns */
            if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
                goto illegal_op;
            val = ldub_code(s->pc++);
            sse_op2 = sse_op_table5[val];
            if (!sse_op2)
                goto illegal_op;
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
            val = ldub_code(s->pc++);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
            break;
        case 0xc2:
            /* compare insns */
            val = ldub_code(s->pc++);
            if (val >= 8)
                goto illegal_op;
            sse_op2 = sse_op_table4[val][b1];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0xf7:
            /* maskmov : we must prepare A0 */
            if (mod != 3)
                goto illegal_op;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(R_EDI);
            } else
#endif
            {
                gen_op_movl_A0_reg(R_EDI);
                if (s->aflag == 0)
                    gen_op_andl_A0_ffff();
            }
            gen_add_A0_ds_seg(s);

            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
            break;
        default:
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        }
        if (b == 0x2e || b == 0x2f) {
            /* (u)comis* set EFLAGS in the helper */
            s->cc_op = CC_OP_EFLAGS;
        }
    }
}

/* convert one instruction. s->is_jmp is set if the translation must
   be stopped. Return the next pc value */
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3508
{
3509
    int b, prefixes, aflag, dflag;
3510
    int shift, ot;
3511
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3512
    target_ulong next_eip, tval;
3513
    int rex_w, rex_r;
3514

    
3515
    s->pc = pc_start;
3516
    prefixes = 0;
3517
    aflag = s->code32;
3518
    dflag = s->code32;
3519
    s->override = -1;
3520
    rex_w = -1;
3521
    rex_r = 0;
3522
#ifdef TARGET_X86_64
3523
    s->rex_x = 0;
3524
    s->rex_b = 0;
3525
    x86_64_hregs = 0;
3526
#endif
3527
    s->rip_offset = 0; /* for relative ip address */
3528
 next_byte:
3529
    b = ldub_code(s->pc);
3530
    s->pc++;
3531
    /* check prefixes */
3532
#ifdef TARGET_X86_64
3533
    if (CODE64(s)) {
3534
        switch (b) {
3535
        case 0xf3:
3536
            prefixes |= PREFIX_REPZ;
3537
            goto next_byte;
3538
        case 0xf2:
3539
            prefixes |= PREFIX_REPNZ;
3540
            goto next_byte;
3541
        case 0xf0:
3542
            prefixes |= PREFIX_LOCK;
3543
            goto next_byte;
3544
        case 0x2e:
3545
            s->override = R_CS;
3546
            goto next_byte;
3547
        case 0x36:
3548
            s->override = R_SS;
3549
            goto next_byte;
3550
        case 0x3e:
3551
            s->override = R_DS;
3552
            goto next_byte;
3553
        case 0x26:
3554
            s->override = R_ES;
3555
            goto next_byte;
3556
        case 0x64:
3557
            s->override = R_FS;
3558
            goto next_byte;
3559
        case 0x65:
3560
            s->override = R_GS;
3561
            goto next_byte;
3562
        case 0x66:
3563
            prefixes |= PREFIX_DATA;
3564
            goto next_byte;
3565
        case 0x67:
3566
            prefixes |= PREFIX_ADR;
3567
            goto next_byte;
3568
        case 0x40 ... 0x4f:
3569
            /* REX prefix */
3570
            rex_w = (b >> 3) & 1;
3571
            rex_r = (b & 0x4) << 1;
3572
            s->rex_x = (b & 0x2) << 2;
3573
            REX_B(s) = (b & 0x1) << 3;
3574
            x86_64_hregs = 1; /* select uniform byte register addressing */
3575
            goto next_byte;
3576
        }
3577
        if (rex_w == 1) {
3578
            /* 0x66 is ignored if rex.w is set */
3579
            dflag = 2;
3580
        } else {
3581
            if (prefixes & PREFIX_DATA)
3582
                dflag ^= 1;
3583
        }
3584
        if (!(prefixes & PREFIX_ADR))
3585
            aflag = 2;
3586
    } else
3587
#endif
3588
    {
3589
        switch (b) {
3590
        case 0xf3:
3591
            prefixes |= PREFIX_REPZ;
3592
            goto next_byte;
3593
        case 0xf2:
3594
            prefixes |= PREFIX_REPNZ;
3595
            goto next_byte;
3596
        case 0xf0:
3597
            prefixes |= PREFIX_LOCK;
3598
            goto next_byte;
3599
        case 0x2e:
3600
            s->override = R_CS;
3601
            goto next_byte;
3602
        case 0x36:
3603
            s->override = R_SS;
3604
            goto next_byte;
3605
        case 0x3e:
3606
            s->override = R_DS;
3607
            goto next_byte;
3608
        case 0x26:
3609
            s->override = R_ES;
3610
            goto next_byte;
3611
        case 0x64:
3612
            s->override = R_FS;
3613
            goto next_byte;
3614
        case 0x65:
3615
            s->override = R_GS;
3616
            goto next_byte;
3617
        case 0x66:
3618
            prefixes |= PREFIX_DATA;
3619
            goto next_byte;
3620
        case 0x67:
3621
            prefixes |= PREFIX_ADR;
3622
            goto next_byte;
3623
        }
3624
        if (prefixes & PREFIX_DATA)
3625
            dflag ^= 1;
3626
        if (prefixes & PREFIX_ADR)
3627
            aflag ^= 1;
3628
    }
3629

    
3630
    s->prefix = prefixes;
3631
    s->aflag = aflag;
3632
    s->dflag = dflag;
3633

    
3634
    /* lock generation */
3635
    if (prefixes & PREFIX_LOCK)
3636
        tcg_gen_helper_0_0(helper_lock);
3637

    
3638
    /* now check op code */
3639
 reswitch:
3640
    switch(b) {
3641
    case 0x0f:
3642
        /**************************/
3643
        /* extended op code */
3644
        b = ldub_code(s->pc++) | 0x100;
3645
        goto reswitch;
3646

    
3647
        /**************************/
3648
        /* arith & logic */
3649
    case 0x00 ... 0x05:
3650
    case 0x08 ... 0x0d:
3651
    case 0x10 ... 0x15:
3652
    case 0x18 ... 0x1d:
3653
    case 0x20 ... 0x25:
3654
    case 0x28 ... 0x2d:
3655
    case 0x30 ... 0x35:
3656
    case 0x38 ... 0x3d:
3657
        {
3658
            int op, f, val;
3659
            op = (b >> 3) & 7;
3660
            f = (b >> 1) & 3;
3661

    
3662
            if ((b & 1) == 0)
3663
                ot = OT_BYTE;
3664
            else
3665
                ot = dflag + OT_WORD;
3666

    
3667
            switch(f) {
3668
            case 0: /* OP Ev, Gv */
3669
                modrm = ldub_code(s->pc++);
3670
                reg = ((modrm >> 3) & 7) | rex_r;
3671
                mod = (modrm >> 6) & 3;
3672
                rm = (modrm & 7) | REX_B(s);
3673
                if (mod != 3) {
3674
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3675
                    opreg = OR_TMP0;
3676
                } else if (op == OP_XORL && rm == reg) {
3677
                xor_zero:
3678
                    /* xor reg, reg optimisation */
3679
                    gen_op_movl_T0_0();
3680
                    s->cc_op = CC_OP_LOGICB + ot;
3681
                    gen_op_mov_reg_T0(ot, reg);
3682
                    gen_op_update1_cc();
3683
                    break;
3684
                } else {
3685
                    opreg = rm;
3686
                }
3687
                gen_op_mov_TN_reg(ot, 1, reg);
3688
                gen_op(s, op, ot, opreg);
3689
                break;
3690
            case 1: /* OP Gv, Ev */
3691
                modrm = ldub_code(s->pc++);
3692
                mod = (modrm >> 6) & 3;
3693
                reg = ((modrm >> 3) & 7) | rex_r;
3694
                rm = (modrm & 7) | REX_B(s);
3695
                if (mod != 3) {
3696
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3697
                    gen_op_ld_T1_A0(ot + s->mem_index);
3698
                } else if (op == OP_XORL && rm == reg) {
3699
                    goto xor_zero;
3700
                } else {
3701
                    gen_op_mov_TN_reg(ot, 1, rm);
3702
                }
3703
                gen_op(s, op, ot, reg);
3704
                break;
3705
            case 2: /* OP A, Iv */
3706
                val = insn_get(s, ot);
3707
                gen_op_movl_T1_im(val);
3708
                gen_op(s, op, ot, OR_EAX);
3709
                break;
3710
            }
3711
        }
3712
        break;
3713

    
3714
    case 0x80: /* GRP1 */
3715
    case 0x81:
3716
    case 0x82:
3717
    case 0x83:
3718
        {
3719
            int val;
3720

    
3721
            if ((b & 1) == 0)
3722
                ot = OT_BYTE;
3723
            else
3724
                ot = dflag + OT_WORD;
3725

    
3726
            modrm = ldub_code(s->pc++);
3727
            mod = (modrm >> 6) & 3;
3728
            rm = (modrm & 7) | REX_B(s);
3729
            op = (modrm >> 3) & 7;
3730

    
3731
            if (mod != 3) {
3732
                if (b == 0x83)
3733
                    s->rip_offset = 1;
3734
                else
3735
                    s->rip_offset = insn_const_size(ot);
3736
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3737
                opreg = OR_TMP0;
3738
            } else {
3739
                opreg = rm;
3740
            }
3741

    
3742
            switch(b) {
3743
            default:
3744
            case 0x80:
3745
            case 0x81:
3746
            case 0x82:
3747
                val = insn_get(s, ot);
3748
                break;
3749
            case 0x83:
3750
                val = (int8_t)insn_get(s, OT_BYTE);
3751
                break;
3752
            }
3753
            gen_op_movl_T1_im(val);
3754
            gen_op(s, op, ot, opreg);
3755
        }
3756
        break;
3757

    
3758
        /**************************/
3759
        /* inc, dec, and other misc arith */
3760
    case 0x40 ... 0x47: /* inc Gv */
3761
        ot = dflag ? OT_LONG : OT_WORD;
3762
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
3763
        break;
3764
    case 0x48 ... 0x4f: /* dec Gv */
3765
        ot = dflag ? OT_LONG : OT_WORD;
3766
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
3767
        break;
3768
    case 0xf6: /* GRP3 */
3769
    case 0xf7:
3770
        if ((b & 1) == 0)
3771
            ot = OT_BYTE;
3772
        else
3773
            ot = dflag + OT_WORD;
3774

    
3775
        modrm = ldub_code(s->pc++);
3776
        mod = (modrm >> 6) & 3;
3777
        rm = (modrm & 7) | REX_B(s);
3778
        op = (modrm >> 3) & 7;
3779
        if (mod != 3) {
3780
            if (op == 0)
3781
                s->rip_offset = insn_const_size(ot);
3782
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3783
            gen_op_ld_T0_A0(ot + s->mem_index);
3784
        } else {
3785
            gen_op_mov_TN_reg(ot, 0, rm);
3786
        }
3787

    
3788
        switch(op) {
3789
        case 0: /* test */
3790
            val = insn_get(s, ot);
3791
            gen_op_movl_T1_im(val);
3792
            gen_op_testl_T0_T1_cc();
3793
            s->cc_op = CC_OP_LOGICB + ot;
3794
            break;
3795
        case 2: /* not */
3796
            tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
3797
            if (mod != 3) {
3798
                gen_op_st_T0_A0(ot + s->mem_index);
3799
            } else {
3800
                gen_op_mov_reg_T0(ot, rm);
3801
            }
3802
            break;
3803
        case 3: /* neg */
3804
            tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
3805
            if (mod != 3) {
3806
                gen_op_st_T0_A0(ot + s->mem_index);
3807
            } else {
3808
                gen_op_mov_reg_T0(ot, rm);
3809
            }
3810
            gen_op_update_neg_cc();
3811
            s->cc_op = CC_OP_SUBB + ot;
3812
            break;
3813
        case 4: /* mul */
3814
            switch(ot) {
3815
            case OT_BYTE:
3816
                gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
3817
                tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
3818
                tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
3819
                /* XXX: use 32 bit mul which could be faster */
3820
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3821
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
3822
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3823
                tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
3824
                s->cc_op = CC_OP_MULB;
3825
                break;
3826
            case OT_WORD:
3827
                gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
3828
                tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
3829
                tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
3830
                /* XXX: use 32 bit mul which could be faster */
3831
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3832
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
3833
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3834
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
3835
                gen_op_mov_reg_T0(OT_WORD, R_EDX);
3836
                tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3837
                s->cc_op = CC_OP_MULW;
3838
                break;
3839
            default:
3840
            case OT_LONG:
3841
#ifdef TARGET_X86_64
3842
                gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3843
                tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
3844
                tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
3845
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3846
                gen_op_mov_reg_T0(OT_LONG, R_EAX);
3847
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3848
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
3849
                gen_op_mov_reg_T0(OT_LONG, R_EDX);
3850
                tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3851
#else
3852
                {
3853
                    TCGv t0, t1;
3854
                    t0 = tcg_temp_new(TCG_TYPE_I64);
3855
                    t1 = tcg_temp_new(TCG_TYPE_I64);
3856
                    gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3857
                    tcg_gen_extu_i32_i64(t0, cpu_T[0]);
3858
                    tcg_gen_extu_i32_i64(t1, cpu_T[1]);
3859
                    tcg_gen_mul_i64(t0, t0, t1);
3860
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3861
                    gen_op_mov_reg_T0(OT_LONG, R_EAX);
3862
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3863
                    tcg_gen_shri_i64(t0, t0, 32);
3864
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3865
                    gen_op_mov_reg_T0(OT_LONG, R_EDX);
3866
                    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3867
                }
3868
#endif
3869
                s->cc_op = CC_OP_MULL;
3870
                break;
3871
#ifdef TARGET_X86_64
3872
            case OT_QUAD:
3873
                tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
3874
                s->cc_op = CC_OP_MULQ;
3875
                break;
3876
#endif
3877
            }
3878
            break;
3879
        case 5: /* imul */
3880
            switch(ot) {
3881
            case OT_BYTE:
3882
                gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
3883
                tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
3884
                tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
3885
                /* XXX: use 32 bit mul which could be faster */
3886
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3887
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
3888
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3889
                tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
3890
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3891
                s->cc_op = CC_OP_MULB;
3892
                break;
3893
            case OT_WORD:
3894
                gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
3895
                tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
3896
                tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
3897
                /* XXX: use 32 bit mul which could be faster */
3898
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3899
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
3900
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3901
                tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
3902
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3903
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
3904
                gen_op_mov_reg_T0(OT_WORD, R_EDX);
3905
                s->cc_op = CC_OP_MULW;
3906
                break;
3907
            default:
3908
            case OT_LONG:
3909
#ifdef TARGET_X86_64
3910
                gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3911
                tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
3912
                tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
3913
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3914
                gen_op_mov_reg_T0(OT_LONG, R_EAX);
3915
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3916
                tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
3917
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3918
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
3919
                gen_op_mov_reg_T0(OT_LONG, R_EDX);
3920
#else
3921
                {
3922
                    TCGv t0, t1;
3923
                    t0 = tcg_temp_new(TCG_TYPE_I64);
3924
                    t1 = tcg_temp_new(TCG_TYPE_I64);
3925
                    gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3926
                    tcg_gen_ext_i32_i64(t0, cpu_T[0]);
3927
                    tcg_gen_ext_i32_i64(t1, cpu_T[1]);
3928
                    tcg_gen_mul_i64(t0, t0, t1);
3929
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3930
                    gen_op_mov_reg_T0(OT_LONG, R_EAX);
3931
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3932
                    tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
3933
                    tcg_gen_shri_i64(t0, t0, 32);
3934
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3935
                    gen_op_mov_reg_T0(OT_LONG, R_EDX);
3936
                    tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3937
                }
3938
#endif
3939
                s->cc_op = CC_OP_MULL;
3940
                break;
3941
#ifdef TARGET_X86_64
3942
            case OT_QUAD:
3943
                tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
3944
                s->cc_op = CC_OP_MULQ;
3945
                break;
3946
#endif
3947
            }
3948
            break;
3949
        case 6: /* div */
3950
            switch(ot) {
3951
            case OT_BYTE:
3952
                gen_jmp_im(pc_start - s->cs_base);
3953
                tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
3954
                break;
3955
            case OT_WORD:
3956
                gen_jmp_im(pc_start - s->cs_base);
3957
                tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
3958
                break;
3959
            default:
3960
            case OT_LONG:
3961
                gen_jmp_im(pc_start - s->cs_base);
3962
                tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
3963
                break;
3964
#ifdef TARGET_X86_64
3965
            case OT_QUAD:
3966
                gen_jmp_im(pc_start - s->cs_base);
3967
                tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
3968
                break;
3969
#endif
3970
            }
3971
            break;
3972
        case 7: /* idiv */
3973
            switch(ot) {
3974
            case OT_BYTE:
3975
                gen_jmp_im(pc_start - s->cs_base);
3976
                tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
3977
                break;
3978
            case OT_WORD:
3979
                gen_jmp_im(pc_start - s->cs_base);
3980
                tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
3981
                break;
3982
            default:
3983
            case OT_LONG:
3984
                gen_jmp_im(pc_start - s->cs_base);
3985
                tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
3986
                break;
3987
#ifdef TARGET_X86_64
3988
            case OT_QUAD:
3989
                gen_jmp_im(pc_start - s->cs_base);
3990
                tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
3991
                break;
3992
#endif
3993
            }
3994
            break;
3995
        default:
3996
            goto illegal_op;
3997
        }
3998
        break;
3999

    
4000
    case 0xfe: /* GRP4 */
4001
    case 0xff: /* GRP5 */
4002
        if ((b & 1) == 0)
4003
            ot = OT_BYTE;
4004
        else
4005
            ot = dflag + OT_WORD;
4006

    
4007
        modrm = ldub_code(s->pc++);
4008
        mod = (modrm >> 6) & 3;
4009
        rm = (modrm & 7) | REX_B(s);
4010
        op = (modrm >> 3) & 7;
4011
        if (op >= 2 && b == 0xfe) {
4012
            goto illegal_op;
4013
        }
4014
        if (CODE64(s)) {
4015
            if (op == 2 || op == 4) {
4016
                /* operand size for jumps is 64 bit */
4017
                ot = OT_QUAD;
4018
            } else if (op == 3 || op == 5) {
4019
                /* for call calls, the operand is 16 or 32 bit, even
4020
                   in long mode */
4021
                ot = dflag ? OT_LONG : OT_WORD;
4022
            } else if (op == 6) {
4023
                /* default push size is 64 bit */
4024
                ot = dflag ? OT_QUAD : OT_WORD;
4025
            }
4026
        }
4027
        if (mod != 3) {
4028
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4029
            if (op >= 2 && op != 3 && op != 5)
4030
                gen_op_ld_T0_A0(ot + s->mem_index);
4031
        } else {
4032
            gen_op_mov_TN_reg(ot, 0, rm);
4033
        }
4034

    
4035
        switch(op) {
4036
        case 0: /* inc Ev */
4037
            if (mod != 3)
4038
                opreg = OR_TMP0;
4039
            else
4040
                opreg = rm;
4041
            gen_inc(s, ot, opreg, 1);
4042
            break;
4043
        case 1: /* dec Ev */
4044
            if (mod != 3)
4045
                opreg = OR_TMP0;
4046
            else
4047
                opreg = rm;
4048
            gen_inc(s, ot, opreg, -1);
4049
            break;
4050
        case 2: /* call Ev */
4051
            /* XXX: optimize if memory (no 'and' is necessary) */
4052
            if (s->dflag == 0)
4053
                gen_op_andl_T0_ffff();
4054
            next_eip = s->pc - s->cs_base;
4055
            gen_movtl_T1_im(next_eip);
4056
            gen_push_T1(s);
4057
            gen_op_jmp_T0();
4058
            gen_eob(s);
4059
            break;
4060
        case 3: /* lcall Ev */
4061
            gen_op_ld_T1_A0(ot + s->mem_index);
4062
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4063
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4064
        do_lcall:
4065
            if (s->pe && !s->vm86) {
4066
                if (s->cc_op != CC_OP_DYNAMIC)
4067
                    gen_op_set_cc_op(s->cc_op);
4068
                gen_jmp_im(pc_start - s->cs_base);
4069
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4070
                tcg_gen_helper_0_4(helper_lcall_protected,
4071
                                   cpu_tmp2_i32, cpu_T[1],
4072
                                   tcg_const_i32(dflag), 
4073
                                   tcg_const_i32(s->pc - pc_start));
4074
            } else {
4075
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4076
                tcg_gen_helper_0_4(helper_lcall_real,
4077
                                   cpu_tmp2_i32, cpu_T[1],
4078
                                   tcg_const_i32(dflag), 
4079
                                   tcg_const_i32(s->pc - s->cs_base));
4080
            }
4081
            gen_eob(s);
4082
            break;
4083
        case 4: /* jmp Ev */
4084
            if (s->dflag == 0)
4085
                gen_op_andl_T0_ffff();
4086
            gen_op_jmp_T0();
4087
            gen_eob(s);
4088
            break;
4089
        case 5: /* ljmp Ev */
4090
            gen_op_ld_T1_A0(ot + s->mem_index);
4091
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4092
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4093
        do_ljmp:
4094
            if (s->pe && !s->vm86) {
4095
                if (s->cc_op != CC_OP_DYNAMIC)
4096
                    gen_op_set_cc_op(s->cc_op);
4097
                gen_jmp_im(pc_start - s->cs_base);
4098
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4099
                tcg_gen_helper_0_3(helper_ljmp_protected,
4100
                                   cpu_tmp2_i32,
4101
                                   cpu_T[1],
4102
                                   tcg_const_i32(s->pc - pc_start));
4103
            } else {
4104
                gen_op_movl_seg_T0_vm(R_CS);
4105
                gen_op_movl_T0_T1();
4106
                gen_op_jmp_T0();
4107
            }
4108
            gen_eob(s);
4109
            break;
4110
        case 6: /* push Ev */
4111
            gen_push_T0(s);
4112
            break;
4113
        default:
4114
            goto illegal_op;
4115
        }
4116
        break;
4117

    
4118
    case 0x84: /* test Ev, Gv */
4119
    case 0x85:
4120
        if ((b & 1) == 0)
4121
            ot = OT_BYTE;
4122
        else
4123
            ot = dflag + OT_WORD;
4124

    
4125
        modrm = ldub_code(s->pc++);
4126
        mod = (modrm >> 6) & 3;
4127
        rm = (modrm & 7) | REX_B(s);
4128
        reg = ((modrm >> 3) & 7) | rex_r;
4129

    
4130
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4131
        gen_op_mov_TN_reg(ot, 1, reg);
4132
        gen_op_testl_T0_T1_cc();
4133
        s->cc_op = CC_OP_LOGICB + ot;
4134
        break;
4135

    
4136
    case 0xa8: /* test eAX, Iv */
4137
    case 0xa9:
4138
        if ((b & 1) == 0)
4139
            ot = OT_BYTE;
4140
        else
4141
            ot = dflag + OT_WORD;
4142
        val = insn_get(s, ot);
4143

    
4144
        gen_op_mov_TN_reg(ot, 0, OR_EAX);
4145
        gen_op_movl_T1_im(val);
4146
        gen_op_testl_T0_T1_cc();
4147
        s->cc_op = CC_OP_LOGICB + ot;
4148
        break;
4149

    
4150
    case 0x98: /* CWDE/CBW */
4151
#ifdef TARGET_X86_64
4152
        if (dflag == 2) {
4153
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4154
            tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4155
            gen_op_mov_reg_T0(OT_QUAD, R_EAX);
4156
        } else
4157
#endif
4158
        if (dflag == 1) {
4159
            gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4160
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4161
            gen_op_mov_reg_T0(OT_LONG, R_EAX);
4162
        } else {
4163
            gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
4164
            tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4165
            gen_op_mov_reg_T0(OT_WORD, R_EAX);
4166
        }
4167
        break;
4168
    case 0x99: /* CDQ/CWD */
4169
#ifdef TARGET_X86_64
4170
        if (dflag == 2) {
4171
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4172
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
4173
            gen_op_mov_reg_T0(OT_QUAD, R_EDX);
4174
        } else
4175
#endif
4176
        if (dflag == 1) {
4177
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4178
            tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4179
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
4180
            gen_op_mov_reg_T0(OT_LONG, R_EDX);
4181
        } else {
4182
            gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4183
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4184
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
4185
            gen_op_mov_reg_T0(OT_WORD, R_EDX);
4186
        }
4187
        break;
4188
    case 0x1af: /* imul Gv, Ev */
4189
    case 0x69: /* imul Gv, Ev, I */
4190
    case 0x6b:
4191
        ot = dflag + OT_WORD;
4192
        modrm = ldub_code(s->pc++);
4193
        reg = ((modrm >> 3) & 7) | rex_r;
4194
        if (b == 0x69)
4195
            s->rip_offset = insn_const_size(ot);
4196
        else if (b == 0x6b)
4197
            s->rip_offset = 1;
4198
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4199
        if (b == 0x69) {
4200
            val = insn_get(s, ot);
4201
            gen_op_movl_T1_im(val);
4202
        } else if (b == 0x6b) {
4203
            val = (int8_t)insn_get(s, OT_BYTE);
4204
            gen_op_movl_T1_im(val);
4205
        } else {
4206
            gen_op_mov_TN_reg(ot, 1, reg);
4207
        }
4208

    
4209
#ifdef TARGET_X86_64
4210
        if (ot == OT_QUAD) {
4211
            tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
4212
        } else
4213
#endif
4214
        if (ot == OT_LONG) {
4215
#ifdef TARGET_X86_64
4216
                tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4217
                tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4218
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4219
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4220
                tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4221
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4222
#else
4223
                {
4224
                    TCGv t0, t1;
4225
                    t0 = tcg_temp_new(TCG_TYPE_I64);
4226
                    t1 = tcg_temp_new(TCG_TYPE_I64);
4227
                    tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4228
                    tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4229
                    tcg_gen_mul_i64(t0, t0, t1);
4230
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4231
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4232
                    tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4233
                    tcg_gen_shri_i64(t0, t0, 32);
4234
                    tcg_gen_trunc_i64_i32(cpu_T[1], t0);
4235
                    tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
4236
                }
4237
#endif
4238
        } else {
4239
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4240
            tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4241
            /* XXX: use 32 bit mul which could be faster */
4242
            tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4243
            tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4244
            tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4245
            tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4246
        }
4247
        gen_op_mov_reg_T0(ot, reg);
4248
        s->cc_op = CC_OP_MULB + ot;
4249
        break;
4250
    case 0x1c0:
4251
    case 0x1c1: /* xadd Ev, Gv */
4252
        if ((b & 1) == 0)
4253
            ot = OT_BYTE;
4254
        else
4255
            ot = dflag + OT_WORD;
4256
        modrm = ldub_code(s->pc++);
4257
        reg = ((modrm >> 3) & 7) | rex_r;
4258
        mod = (modrm >> 6) & 3;
4259
        if (mod == 3) {
4260
            rm = (modrm & 7) | REX_B(s);
4261
            gen_op_mov_TN_reg(ot, 0, reg);
4262
            gen_op_mov_TN_reg(ot, 1, rm);
4263
            gen_op_addl_T0_T1();
4264
            gen_op_mov_reg_T1(ot, reg);
4265
            gen_op_mov_reg_T0(ot, rm);
4266
        } else {
4267
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4268
            gen_op_mov_TN_reg(ot, 0, reg);
4269
            gen_op_ld_T1_A0(ot + s->mem_index);
4270
            gen_op_addl_T0_T1();
4271
            gen_op_st_T0_A0(ot + s->mem_index);
4272
            gen_op_mov_reg_T1(ot, reg);
4273
        }
4274
        gen_op_update2_cc();
4275
        s->cc_op = CC_OP_ADDB + ot;
4276
        break;
4277
    case 0x1b0:
4278
    case 0x1b1: /* cmpxchg Ev, Gv */
4279
        {
4280
            int label1;
4281

    
4282
            if ((b & 1) == 0)
4283
                ot = OT_BYTE;
4284
            else
4285
                ot = dflag + OT_WORD;
4286
            modrm = ldub_code(s->pc++);
4287
            reg = ((modrm >> 3) & 7) | rex_r;
4288
            mod = (modrm >> 6) & 3;
4289
            gen_op_mov_TN_reg(ot, 1, reg);
4290
            if (mod == 3) {
4291
                rm = (modrm & 7) | REX_B(s);
4292
                gen_op_mov_TN_reg(ot, 0, rm);
4293
            } else {
4294
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4295
                gen_op_ld_T0_A0(ot + s->mem_index);
4296
                rm = 0; /* avoid warning */
4297
            }
4298
            label1 = gen_new_label();
4299
            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_EAX]));
4300
            tcg_gen_sub_tl(cpu_T3, cpu_T3, cpu_T[0]);
4301
            gen_extu(ot, cpu_T3);
4302
            tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);
4303
            tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
4304
            gen_op_mov_reg_T0(ot, R_EAX);
4305
            gen_set_label(label1);
4306
            if (mod == 3) {
4307
                gen_op_mov_reg_T1(ot, rm);
4308
            } else {
4309
                gen_op_st_T1_A0(ot + s->mem_index);
4310
            }
4311
            tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4312
            tcg_gen_mov_tl(cpu_cc_dst, cpu_T3);
4313
            s->cc_op = CC_OP_SUBB + ot;
4314
        }
4315
        break;
4316
    case 0x1c7: /* cmpxchg8b */
4317
        modrm = ldub_code(s->pc++);
4318
        mod = (modrm >> 6) & 3;
4319
        if ((mod == 3) || ((modrm & 0x38) != 0x8))
4320
            goto illegal_op;
4321
        gen_jmp_im(pc_start - s->cs_base);
4322
        if (s->cc_op != CC_OP_DYNAMIC)
4323
            gen_op_set_cc_op(s->cc_op);
4324
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4325
        tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
4326
        s->cc_op = CC_OP_EFLAGS;
4327
        break;
4328

    
4329
        /**************************/
4330
        /* push/pop */
4331
    case 0x50 ... 0x57: /* push */
4332
        gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
4333
        gen_push_T0(s);
4334
        break;
4335
    case 0x58 ... 0x5f: /* pop */
4336
        if (CODE64(s)) {
4337
            ot = dflag ? OT_QUAD : OT_WORD;
4338
        } else {
4339
            ot = dflag + OT_WORD;
4340
        }
4341
        gen_pop_T0(s);
4342
        /* NOTE: order is important for pop %sp */
4343
        gen_pop_update(s);
4344
        gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
4345
        break;
4346
    case 0x60: /* pusha */
4347
        if (CODE64(s))
4348
            goto illegal_op;
4349
        gen_pusha(s);
4350
        break;
4351
    case 0x61: /* popa */
4352
        if (CODE64(s))
4353
            goto illegal_op;
4354
        gen_popa(s);
4355
        break;
4356
    case 0x68: /* push Iv */
4357
    case 0x6a:
4358
        if (CODE64(s)) {
4359
            ot = dflag ? OT_QUAD : OT_WORD;
4360
        } else {
4361
            ot = dflag + OT_WORD;
4362
        }
4363
        if (b == 0x68)
4364
            val = insn_get(s, ot);
4365
        else
4366
            val = (int8_t)insn_get(s, OT_BYTE);
4367
        gen_op_movl_T0_im(val);
4368
        gen_push_T0(s);
4369
        break;
4370
    case 0x8f: /* pop Ev */
4371
        if (CODE64(s)) {
4372
            ot = dflag ? OT_QUAD : OT_WORD;
4373
        } else {
4374
            ot = dflag + OT_WORD;
4375
        }
4376
        modrm = ldub_code(s->pc++);
4377
        mod = (modrm >> 6) & 3;
4378
        gen_pop_T0(s);
4379
        if (mod == 3) {
4380
            /* NOTE: order is important for pop %sp */
4381
            gen_pop_update(s);
4382
            rm = (modrm & 7) | REX_B(s);
4383
            gen_op_mov_reg_T0(ot, rm);
4384
        } else {
4385
            /* NOTE: order is important too for MMU exceptions */
4386
            s->popl_esp_hack = 1 << ot;
4387
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4388
            s->popl_esp_hack = 0;
4389
            gen_pop_update(s);
4390
        }
4391
        break;
4392
    case 0xc8: /* enter */
4393
        {
4394
            int level;
4395
            val = lduw_code(s->pc);
4396
            s->pc += 2;
4397
            level = ldub_code(s->pc++);
4398
            gen_enter(s, val, level);
4399
        }
4400
        break;
4401
    case 0xc9: /* leave */
4402
        /* XXX: exception not precise (ESP is updated before potential exception) */
4403
        if (CODE64(s)) {
4404
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4405
            gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4406
        } else if (s->ss32) {
4407
            gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4408
            gen_op_mov_reg_T0(OT_LONG, R_ESP);
4409
        } else {
4410
            gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4411
            gen_op_mov_reg_T0(OT_WORD, R_ESP);
4412
        }
4413
        gen_pop_T0(s);
4414
        if (CODE64(s)) {
4415
            ot = dflag ? OT_QUAD : OT_WORD;
4416
        } else {
4417
            ot = dflag + OT_WORD;
4418
        }
4419
        gen_op_mov_reg_T0(ot, R_EBP);
4420
        gen_pop_update(s);
4421
        break;
4422
    case 0x06: /* push es */
4423
    case 0x0e: /* push cs */
4424
    case 0x16: /* push ss */
4425
    case 0x1e: /* push ds */
4426
        if (CODE64(s))
4427
            goto illegal_op;
4428
        gen_op_movl_T0_seg(b >> 3);
4429
        gen_push_T0(s);
4430
        break;
4431
    case 0x1a0: /* push fs */
4432
    case 0x1a8: /* push gs */
4433
        gen_op_movl_T0_seg((b >> 3) & 7);
4434
        gen_push_T0(s);
4435
        break;
4436
    case 0x07: /* pop es */
4437
    case 0x17: /* pop ss */
4438
    case 0x1f: /* pop ds */
4439
        if (CODE64(s))
4440
            goto illegal_op;
4441
        reg = b >> 3;
4442
        gen_pop_T0(s);
4443
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4444
        gen_pop_update(s);
4445
        if (reg == R_SS) {
4446
            /* if reg == SS, inhibit interrupts/trace. */
4447
            /* If several instructions disable interrupts, only the
4448
               _first_ does it */
4449
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4450
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
4451
            s->tf = 0;
4452
        }
4453
        if (s->is_jmp) {
4454
            gen_jmp_im(s->pc - s->cs_base);
4455
            gen_eob(s);
4456
        }
4457
        break;
4458
    case 0x1a1: /* pop fs */
4459
    case 0x1a9: /* pop gs */
4460
        gen_pop_T0(s);
4461
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4462
        gen_pop_update(s);
4463
        if (s->is_jmp) {
4464
            gen_jmp_im(s->pc - s->cs_base);
4465
            gen_eob(s);
4466
        }
4467
        break;
4468

    
4469
        /**************************/
4470
        /* mov */
4471
    case 0x88:
4472
    case 0x89: /* mov Gv, Ev */
4473
        if ((b & 1) == 0)
4474
            ot = OT_BYTE;
4475
        else
4476
            ot = dflag + OT_WORD;
4477
        modrm = ldub_code(s->pc++);
4478
        reg = ((modrm >> 3) & 7) | rex_r;
4479

    
4480
        /* generate a generic store */
4481
        gen_ldst_modrm(s, modrm, ot, reg, 1);
4482
        break;
4483
    case 0xc6:
4484
    case 0xc7: /* mov Ev, Iv */
4485
        if ((b & 1) == 0)
4486
            ot = OT_BYTE;
4487
        else
4488
            ot = dflag + OT_WORD;
4489
        modrm = ldub_code(s->pc++);
4490
        mod = (modrm >> 6) & 3;
4491
        if (mod != 3) {
4492
            s->rip_offset = insn_const_size(ot);
4493
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4494
        }
4495
        val = insn_get(s, ot);
4496
        gen_op_movl_T0_im(val);
4497
        if (mod != 3)
4498
            gen_op_st_T0_A0(ot + s->mem_index);
4499
        else
4500
            gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4501
        break;
4502
    case 0x8a:
4503
    case 0x8b: /* mov Ev, Gv */
4504
        if ((b & 1) == 0)
4505
            ot = OT_BYTE;
4506
        else
4507
            ot = OT_WORD + dflag;
4508
        modrm = ldub_code(s->pc++);
4509
        reg = ((modrm >> 3) & 7) | rex_r;
4510

    
4511
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4512
        gen_op_mov_reg_T0(ot, reg);
4513
        break;
4514
    case 0x8e: /* mov seg, Gv */
4515
        modrm = ldub_code(s->pc++);
4516
        reg = (modrm >> 3) & 7;
4517
        if (reg >= 6 || reg == R_CS)
4518
            goto illegal_op;
4519
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4520
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4521
        if (reg == R_SS) {
4522
            /* if reg == SS, inhibit interrupts/trace */
4523
            /* If several instructions disable interrupts, only the
4524
               _first_ does it */
4525
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4526
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
4527
            s->tf = 0;
4528
        }
4529
        if (s->is_jmp) {
4530
            gen_jmp_im(s->pc - s->cs_base);
4531
            gen_eob(s);
4532
        }
4533
        break;
4534
    case 0x8c: /* mov Gv, seg */
4535
        modrm = ldub_code(s->pc++);
4536
        reg = (modrm >> 3) & 7;
4537
        mod = (modrm >> 6) & 3;
4538
        if (reg >= 6)
4539
            goto illegal_op;
4540
        gen_op_movl_T0_seg(reg);
4541
        if (mod == 3)
4542
            ot = OT_WORD + dflag;
4543
        else
4544
            ot = OT_WORD;
4545
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4546
        break;
4547

    
4548
    case 0x1b6: /* movzbS Gv, Eb */
4549
    case 0x1b7: /* movzwS Gv, Eb */
4550
    case 0x1be: /* movsbS Gv, Eb */
4551
    case 0x1bf: /* movswS Gv, Eb */
4552
        {
4553
            int d_ot;
4554
            /* d_ot is the size of destination */
4555
            d_ot = dflag + OT_WORD;
4556
            /* ot is the size of source */
4557
            ot = (b & 1) + OT_BYTE;
4558
            modrm = ldub_code(s->pc++);
4559
            reg = ((modrm >> 3) & 7) | rex_r;
4560
            mod = (modrm >> 6) & 3;
4561
            rm = (modrm & 7) | REX_B(s);
4562

    
4563
            if (mod == 3) {
4564
                gen_op_mov_TN_reg(ot, 0, rm);
4565
                switch(ot | (b & 8)) {
4566
                case OT_BYTE:
4567
                    tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4568
                    break;
4569
                case OT_BYTE | 8:
4570
                    tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4571
                    break;
4572
                case OT_WORD:
4573
                    tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4574
                    break;
4575
                default:
4576
                case OT_WORD | 8:
4577
                    tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4578
                    break;
4579
                }
4580
                gen_op_mov_reg_T0(d_ot, reg);
4581
            } else {
4582
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4583
                if (b & 8) {
4584
                    gen_op_lds_T0_A0(ot + s->mem_index);
4585
                } else {
4586
                    gen_op_ldu_T0_A0(ot + s->mem_index);
4587
                }
4588
                gen_op_mov_reg_T0(d_ot, reg);
4589
            }
4590
        }
4591
        break;
4592

    
4593
    case 0x8d: /* lea */
4594
        ot = dflag + OT_WORD;
4595
        modrm = ldub_code(s->pc++);
4596
        mod = (modrm >> 6) & 3;
4597
        if (mod == 3)
4598
            goto illegal_op;
4599
        reg = ((modrm >> 3) & 7) | rex_r;
4600
        /* we must ensure that no segment is added */
4601
        s->override = -1;
4602
        val = s->addseg;
4603
        s->addseg = 0;
4604
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4605
        s->addseg = val;
4606
        gen_op_mov_reg_A0(ot - OT_WORD, reg);
4607
        break;
4608

    
4609
    case 0xa0: /* mov EAX, Ov */
4610
    case 0xa1:
4611
    case 0xa2: /* mov Ov, EAX */
4612
    case 0xa3:
4613
        {
4614
            target_ulong offset_addr;
4615

    
4616
            if ((b & 1) == 0)
4617
                ot = OT_BYTE;
4618
            else
4619
                ot = dflag + OT_WORD;
4620
#ifdef TARGET_X86_64
4621
            if (s->aflag == 2) {
4622
                offset_addr = ldq_code(s->pc);
4623
                s->pc += 8;
4624
                gen_op_movq_A0_im(offset_addr);
4625
            } else
4626
#endif
4627
            {
4628
                if (s->aflag) {
4629
                    offset_addr = insn_get(s, OT_LONG);
4630
                } else {
4631
                    offset_addr = insn_get(s, OT_WORD);
4632
                }
4633
                gen_op_movl_A0_im(offset_addr);
4634
            }
4635
            gen_add_A0_ds_seg(s);
4636
            if ((b & 2) == 0) {
4637
                gen_op_ld_T0_A0(ot + s->mem_index);
4638
                gen_op_mov_reg_T0(ot, R_EAX);
4639
            } else {
4640
                gen_op_mov_TN_reg(ot, 0, R_EAX);
4641
                gen_op_st_T0_A0(ot + s->mem_index);
4642
            }
4643
        }
4644
        break;
4645
    case 0xd7: /* xlat */
4646
#ifdef TARGET_X86_64
4647
        if (s->aflag == 2) {
4648
            gen_op_movq_A0_reg(R_EBX);
4649
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4650
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4651
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4652
        } else
4653
#endif
4654
        {
4655
            gen_op_movl_A0_reg(R_EBX);
4656
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4657
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4658
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4659
            if (s->aflag == 0)
4660
                gen_op_andl_A0_ffff();
4661
            else
4662
                tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
4663
        }
4664
        gen_add_A0_ds_seg(s);
4665
        gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4666
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4667
        break;
4668
    case 0xb0 ... 0xb7: /* mov R, Ib */
4669
        val = insn_get(s, OT_BYTE);
4670
        gen_op_movl_T0_im(val);
4671
        gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4672
        break;
4673
    case 0xb8 ... 0xbf: /* mov R, Iv */
4674
#ifdef TARGET_X86_64
4675
        if (dflag == 2) {
4676
            uint64_t tmp;
4677
            /* 64 bit case */
4678
            tmp = ldq_code(s->pc);
4679
            s->pc += 8;
4680
            reg = (b & 7) | REX_B(s);
4681
            gen_movtl_T0_im(tmp);
4682
            gen_op_mov_reg_T0(OT_QUAD, reg);
4683
        } else
4684
#endif
4685
        {
4686
            ot = dflag ? OT_LONG : OT_WORD;
4687
            val = insn_get(s, ot);
4688
            reg = (b & 7) | REX_B(s);
4689
            gen_op_movl_T0_im(val);
4690
            gen_op_mov_reg_T0(ot, reg);
4691
        }
4692
        break;
4693

    
4694
    case 0x91 ... 0x97: /* xchg R, EAX */
4695
        ot = dflag + OT_WORD;
4696
        reg = (b & 7) | REX_B(s);
4697
        rm = R_EAX;
4698
        goto do_xchg_reg;
4699
    case 0x86:
4700
    case 0x87: /* xchg Ev, Gv */
4701
        if ((b & 1) == 0)
4702
            ot = OT_BYTE;
4703
        else
4704
            ot = dflag + OT_WORD;
4705
        modrm = ldub_code(s->pc++);
4706
        reg = ((modrm >> 3) & 7) | rex_r;
4707
        mod = (modrm >> 6) & 3;
4708
        if (mod == 3) {
4709
            rm = (modrm & 7) | REX_B(s);
4710
        do_xchg_reg:
4711
            gen_op_mov_TN_reg(ot, 0, reg);
4712
            gen_op_mov_TN_reg(ot, 1, rm);
4713
            gen_op_mov_reg_T0(ot, rm);
4714
            gen_op_mov_reg_T1(ot, reg);
4715
        } else {
4716
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4717
            gen_op_mov_TN_reg(ot, 0, reg);
4718
            /* for xchg, lock is implicit */
4719
            if (!(prefixes & PREFIX_LOCK))
4720
                tcg_gen_helper_0_0(helper_lock);
4721
            gen_op_ld_T1_A0(ot + s->mem_index);
4722
            gen_op_st_T0_A0(ot + s->mem_index);
4723
            if (!(prefixes & PREFIX_LOCK))
4724
                tcg_gen_helper_0_0(helper_unlock);
4725
            gen_op_mov_reg_T1(ot, reg);
4726
        }
4727
        break;
4728
    case 0xc4: /* les Gv */
4729
        if (CODE64(s))
4730
            goto illegal_op;
4731
        op = R_ES;
4732
        goto do_lxx;
4733
    case 0xc5: /* lds Gv */
4734
        if (CODE64(s))
4735
            goto illegal_op;
4736
        op = R_DS;
4737
        goto do_lxx;
4738
    case 0x1b2: /* lss Gv */
4739
        op = R_SS;
4740
        goto do_lxx;
4741
    case 0x1b4: /* lfs Gv */
4742
        op = R_FS;
4743
        goto do_lxx;
4744
    case 0x1b5: /* lgs Gv */
4745
        op = R_GS;
4746
    do_lxx:
4747
        ot = dflag ? OT_LONG : OT_WORD;
4748
        modrm = ldub_code(s->pc++);
4749
        reg = ((modrm >> 3) & 7) | rex_r;
4750
        mod = (modrm >> 6) & 3;
4751
        if (mod == 3)
4752
            goto illegal_op;
4753
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4754
        gen_op_ld_T1_A0(ot + s->mem_index);
4755
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4756
        /* load the segment first to handle exceptions properly */
4757
        gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4758
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4759
        /* then put the data */
4760
        gen_op_mov_reg_T1(ot, reg);
4761
        if (s->is_jmp) {
4762
            gen_jmp_im(s->pc - s->cs_base);
4763
            gen_eob(s);
4764
        }
4765
        break;
4766

    
4767
        /************************/
4768
        /* shifts */
4769
    case 0xc0:
4770
    case 0xc1:
4771
        /* shift Ev,Ib */
4772
        shift = 2;
4773
    grp2:
4774
        {
4775
            if ((b & 1) == 0)
4776
                ot = OT_BYTE;
4777
            else
4778
                ot = dflag + OT_WORD;
4779

    
4780
            modrm = ldub_code(s->pc++);
4781
            mod = (modrm >> 6) & 3;
4782
            op = (modrm >> 3) & 7;
4783

    
4784
            if (mod != 3) {
4785
                if (shift == 2) {
4786
                    s->rip_offset = 1;
4787
                }
4788
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4789
                opreg = OR_TMP0;
4790
            } else {
4791
                opreg = (modrm & 7) | REX_B(s);
4792
            }
4793

    
4794
            /* simpler op */
4795
            if (shift == 0) {
4796
                gen_shift(s, op, ot, opreg, OR_ECX);
4797
            } else {
4798
                if (shift == 2) {
4799
                    shift = ldub_code(s->pc++);
4800
                }
4801
                gen_shifti(s, op, ot, opreg, shift);
4802
            }
4803
        }
4804
        break;
4805
    case 0xd0:
4806
    case 0xd1:
4807
        /* shift Ev,1 */
4808
        shift = 1;
4809
        goto grp2;
4810
    case 0xd2:
4811
    case 0xd3:
4812
        /* shift Ev,cl */
4813
        shift = 0;
4814
        goto grp2;
4815

    
4816
    case 0x1a4: /* shld imm */
4817
        op = 0;
4818
        shift = 1;
4819
        goto do_shiftd;
4820
    case 0x1a5: /* shld cl */
4821
        op = 0;
4822
        shift = 0;
4823
        goto do_shiftd;
4824
    case 0x1ac: /* shrd imm */
4825
        op = 1;
4826
        shift = 1;
4827
        goto do_shiftd;
4828
    case 0x1ad: /* shrd cl */
4829
        op = 1;
4830
        shift = 0;
4831
    do_shiftd:
4832
        ot = dflag + OT_WORD;
4833
        modrm = ldub_code(s->pc++);
4834
        mod = (modrm >> 6) & 3;
4835
        rm = (modrm & 7) | REX_B(s);
4836
        reg = ((modrm >> 3) & 7) | rex_r;
4837
        if (mod != 3) {
4838
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4839
            opreg = OR_TMP0;
4840
        } else {
4841
            opreg = rm;
4842
        }
4843
        gen_op_mov_TN_reg(ot, 1, reg);
4844

    
4845
        if (shift) {
4846
            val = ldub_code(s->pc++);
4847
            tcg_gen_movi_tl(cpu_T3, val);
4848
        } else {
4849
            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
4850
        }
4851
        gen_shiftd_rm_T1_T3(s, ot, opreg, op);
4852
        break;
4853

    
4854
        /************************/
4855
        /* floats */
4856
    case 0xd8 ... 0xdf:
4857
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4858
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4859
            /* XXX: what to do if illegal op ? */
4860
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4861
            break;
4862
        }
4863
        modrm = ldub_code(s->pc++);
4864
        mod = (modrm >> 6) & 3;
4865
        rm = modrm & 7;
4866
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4867
        if (mod != 3) {
4868
            /* memory op */
4869
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4870
            switch(op) {
4871
            case 0x00 ... 0x07: /* fxxxs */
4872
            case 0x10 ... 0x17: /* fixxxl */
4873
            case 0x20 ... 0x27: /* fxxxl */
4874
            case 0x30 ... 0x37: /* fixxx */
4875
                {
4876
                    int op1;
4877
                    op1 = op & 7;
4878

    
4879
                    switch(op >> 4) {
4880
                    case 0:
4881
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4882
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4883
                        tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
4884
                        break;
4885
                    case 1:
4886
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4887
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4888
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4889
                        break;
4890
                    case 2:
4891
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
4892
                                          (s->mem_index >> 2) - 1);
4893
                        tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
4894
                        break;
4895
                    case 3:
4896
                    default:
4897
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4898
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4899
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4900
                        break;
4901
                    }
4902

    
4903
                    tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
4904
                    if (op1 == 3) {
4905
                        /* fcomp needs pop */
4906
                        tcg_gen_helper_0_0(helper_fpop);
4907
                    }
4908
                }
4909
                break;
4910
            case 0x08: /* flds */
4911
            case 0x0a: /* fsts */
4912
            case 0x0b: /* fstps */
4913
            case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4914
            case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4915
            case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4916
                switch(op & 7) {
4917
                case 0:
4918
                    switch(op >> 4) {
4919
                    case 0:
4920
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4921
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4922
                        tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
4923
                        break;
4924
                    case 1:
4925
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4926
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4927
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
4928
                        break;
4929
                    case 2:
4930
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
4931
                                          (s->mem_index >> 2) - 1);
4932
                        tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
4933
                        break;
4934
                    case 3:
4935
                    default:
4936
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4937
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4938
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
4939
                        break;
4940
                    }
4941
                    break;
4942
                case 1:
4943
                    /* XXX: the corresponding CPUID bit must be tested ! */
4944
                    switch(op >> 4) {
4945
                    case 1:
4946
                        tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
4947
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4948
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4949
                        break;
4950
                    case 2:
4951
                        tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
4952
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
4953
                                          (s->mem_index >> 2) - 1);
4954
                        break;
4955
                    case 3:
4956
                    default:
4957
                        tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
4958
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4959
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
4960
                        break;
4961
                    }
4962
                    tcg_gen_helper_0_0(helper_fpop);
4963
                    break;
4964
                default:
4965
                    switch(op >> 4) {
4966
                    case 0:
4967
                        tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
4968
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4969
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4970
                        break;
4971
                    case 1:
4972
                        tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
4973
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4974
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4975
                        break;
4976
                    case 2:
4977
                        tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
4978
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
4979
                                          (s->mem_index >> 2) - 1);
4980
                        break;
4981
                    case 3:
4982
                    default:
4983
                        tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
4984
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4985
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
4986
                        break;
4987
                    }
4988
                    if ((op & 7) == 3)
4989
                        tcg_gen_helper_0_0(helper_fpop);
4990
                    break;
4991
                }
4992
                break;
4993
            case 0x0c: /* fldenv mem */
4994
                if (s->cc_op != CC_OP_DYNAMIC)
4995
                    gen_op_set_cc_op(s->cc_op);
4996
                gen_jmp_im(pc_start - s->cs_base);
4997
                tcg_gen_helper_0_2(helper_fldenv, 
4998
                                   cpu_A0, tcg_const_i32(s->dflag));
4999
                break;
5000
            case 0x0d: /* fldcw mem */
5001
                gen_op_ld_T0_A0(OT_WORD + s->mem_index);
5002
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5003
                tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
5004
                break;
5005
            case 0x0e: /* fnstenv mem */
5006
                if (s->cc_op != CC_OP_DYNAMIC)
5007
                    gen_op_set_cc_op(s->cc_op);
5008
                gen_jmp_im(pc_start - s->cs_base);
5009
                tcg_gen_helper_0_2(helper_fstenv,
5010
                                   cpu_A0, tcg_const_i32(s->dflag));
5011
                break;
5012
            case 0x0f: /* fnstcw mem */
5013
                tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
5014
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5015
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
5016
                break;
5017
            case 0x1d: /* fldt mem */
5018
                if (s->cc_op != CC_OP_DYNAMIC)
5019
                    gen_op_set_cc_op(s->cc_op);
5020
                gen_jmp_im(pc_start - s->cs_base);
5021
                tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
5022
                break;
5023
            case 0x1f: /* fstpt mem */
5024
                if (s->cc_op != CC_OP_DYNAMIC)
5025
                    gen_op_set_cc_op(s->cc_op);
5026
                gen_jmp_im(pc_start - s->cs_base);
5027
                tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
5028
                tcg_gen_helper_0_0(helper_fpop);
5029
                break;
5030
            case 0x2c: /* frstor mem */
5031
                if (s->cc_op != CC_OP_DYNAMIC)
5032
                    gen_op_set_cc_op(s->cc_op);
5033
                gen_jmp_im(pc_start - s->cs_base);
5034
                tcg_gen_helper_0_2(helper_frstor,
5035
                                   cpu_A0, tcg_const_i32(s->dflag));
5036
                break;
5037
            case 0x2e: /* fnsave mem */
5038
                if (s->cc_op != CC_OP_DYNAMIC)
5039
                    gen_op_set_cc_op(s->cc_op);
5040
                gen_jmp_im(pc_start - s->cs_base);
5041
                tcg_gen_helper_0_2(helper_fsave,
5042
                                   cpu_A0, tcg_const_i32(s->dflag));
5043
                break;
5044
            case 0x2f: /* fnstsw mem */
5045
                tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5046
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5047
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
5048
                break;
5049
            case 0x3c: /* fbld */
5050
                if (s->cc_op != CC_OP_DYNAMIC)
5051
                    gen_op_set_cc_op(s->cc_op);
5052
                gen_jmp_im(pc_start - s->cs_base);
5053
                tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
5054
                break;
5055
            case 0x3e: /* fbstp */
5056
                if (s->cc_op != CC_OP_DYNAMIC)
5057
                    gen_op_set_cc_op(s->cc_op);
5058
                gen_jmp_im(pc_start - s->cs_base);
5059
                tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
5060
                tcg_gen_helper_0_0(helper_fpop);
5061
                break;
5062
            case 0x3d: /* fildll */
5063
                tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
5064
                                  (s->mem_index >> 2) - 1);
5065
                tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
5066
                break;
5067
            case 0x3f: /* fistpll */
5068
                tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
5069
                tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
5070
                                  (s->mem_index >> 2) - 1);
5071
                tcg_gen_helper_0_0(helper_fpop);
5072
                break;
5073
            default:
5074
                goto illegal_op;
5075
            }
5076
        } else {
5077
            /* register float ops */
5078
            opreg = rm;
5079

    
5080
            switch(op) {
5081
            case 0x08: /* fld sti */
5082
                tcg_gen_helper_0_0(helper_fpush);
5083
                tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
5084
                break;
5085
            case 0x09: /* fxchg sti */
5086
            case 0x29: /* fxchg4 sti, undocumented op */
5087
            case 0x39: /* fxchg7 sti, undocumented op */
5088
                tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
5089
                break;
5090
            case 0x0a: /* grp d9/2 */
5091
                switch(rm) {
5092
                case 0: /* fnop */
5093
                    /* check exceptions (FreeBSD FPU probe) */
5094
                    if (s->cc_op != CC_OP_DYNAMIC)
5095
                        gen_op_set_cc_op(s->cc_op);
5096
                    gen_jmp_im(pc_start - s->cs_base);
5097
                    tcg_gen_helper_0_0(helper_fwait);
5098
                    break;
5099
                default:
5100
                    goto illegal_op;
5101
                }
5102
                break;
5103
            case 0x0c: /* grp d9/4 */
5104
                switch(rm) {
5105
                case 0: /* fchs */
5106
                    tcg_gen_helper_0_0(helper_fchs_ST0);
5107
                    break;
5108
                case 1: /* fabs */
5109
                    tcg_gen_helper_0_0(helper_fabs_ST0);
5110
                    break;
5111
                case 4: /* ftst */
5112
                    tcg_gen_helper_0_0(helper_fldz_FT0);
5113
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5114
                    break;
5115
                case 5: /* fxam */
5116
                    tcg_gen_helper_0_0(helper_fxam_ST0);
5117
                    break;
5118
                default:
5119
                    goto illegal_op;
5120
                }
5121
                break;
5122
            case 0x0d: /* grp d9/5 */
5123
                {
5124
                    switch(rm) {
5125
                    case 0:
5126
                        tcg_gen_helper_0_0(helper_fpush);
5127
                        tcg_gen_helper_0_0(helper_fld1_ST0);
5128
                        break;
5129
                    case 1:
5130
                        tcg_gen_helper_0_0(helper_fpush);
5131
                        tcg_gen_helper_0_0(helper_fldl2t_ST0);
5132
                        break;
5133
                    case 2:
5134
                        tcg_gen_helper_0_0(helper_fpush);
5135
                        tcg_gen_helper_0_0(helper_fldl2e_ST0);
5136
                        break;
5137
                    case 3:
5138
                        tcg_gen_helper_0_0(helper_fpush);
5139
                        tcg_gen_helper_0_0(helper_fldpi_ST0);
5140
                        break;
5141
                    case 4:
5142
                        tcg_gen_helper_0_0(helper_fpush);
5143
                        tcg_gen_helper_0_0(helper_fldlg2_ST0);
5144
                        break;
5145
                    case 5:
5146
                        tcg_gen_helper_0_0(helper_fpush);
5147
                        tcg_gen_helper_0_0(helper_fldln2_ST0);
5148
                        break;
5149
                    case 6:
5150
                        tcg_gen_helper_0_0(helper_fpush);
5151
                        tcg_gen_helper_0_0(helper_fldz_ST0);
5152
                        break;
5153
                    default:
5154
                        goto illegal_op;
5155
                    }
5156
                }
5157
                break;
5158
            case 0x0e: /* grp d9/6 */
5159
                switch(rm) {
5160
                case 0: /* f2xm1 */
5161
                    tcg_gen_helper_0_0(helper_f2xm1);
5162
                    break;
5163
                case 1: /* fyl2x */
5164
                    tcg_gen_helper_0_0(helper_fyl2x);
5165
                    break;
5166
                case 2: /* fptan */
5167
                    tcg_gen_helper_0_0(helper_fptan);
5168
                    break;
5169
                case 3: /* fpatan */
5170
                    tcg_gen_helper_0_0(helper_fpatan);
5171
                    break;
5172
                case 4: /* fxtract */
5173
                    tcg_gen_helper_0_0(helper_fxtract);
5174
                    break;
5175
                case 5: /* fprem1 */
5176
                    tcg_gen_helper_0_0(helper_fprem1);
5177
                    break;
5178
                case 6: /* fdecstp */
5179
                    tcg_gen_helper_0_0(helper_fdecstp);
5180
                    break;
5181
                default:
5182
                case 7: /* fincstp */
5183
                    tcg_gen_helper_0_0(helper_fincstp);
5184
                    break;
5185
                }
5186
                break;
5187
            case 0x0f: /* grp d9/7 */
5188
                switch(rm) {
5189
                case 0: /* fprem */
5190
                    tcg_gen_helper_0_0(helper_fprem);
5191
                    break;
5192
                case 1: /* fyl2xp1 */
5193
                    tcg_gen_helper_0_0(helper_fyl2xp1);
5194
                    break;
5195
                case 2: /* fsqrt */
5196
                    tcg_gen_helper_0_0(helper_fsqrt);
5197
                    break;
5198
                case 3: /* fsincos */
5199
                    tcg_gen_helper_0_0(helper_fsincos);
5200
                    break;
5201
                case 5: /* fscale */
5202
                    tcg_gen_helper_0_0(helper_fscale);
5203
                    break;
5204
                case 4: /* frndint */
5205
                    tcg_gen_helper_0_0(helper_frndint);
5206
                    break;
5207
                case 6: /* fsin */
5208
                    tcg_gen_helper_0_0(helper_fsin);
5209
                    break;
5210
                default:
5211
                case 7: /* fcos */
5212
                    tcg_gen_helper_0_0(helper_fcos);
5213
                    break;
5214
                }
5215
                break;
5216
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
5217
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
5218
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
5219
                {
5220
                    int op1;
5221

    
5222
                    op1 = op & 7;
5223
                    if (op >= 0x20) {
5224
                        tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
5225
                        if (op >= 0x30)
5226
                            tcg_gen_helper_0_0(helper_fpop);
5227
                    } else {
5228
                        tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5229
                        tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
5230
                    }
5231
                }
5232
                break;
5233
            case 0x02: /* fcom */
5234
            case 0x22: /* fcom2, undocumented op */
5235
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5236
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5237
                break;
5238
            case 0x03: /* fcomp */
5239
            case 0x23: /* fcomp3, undocumented op */
5240
            case 0x32: /* fcomp5, undocumented op */
5241
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5242
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5243
                tcg_gen_helper_0_0(helper_fpop);
5244
                break;
5245
            case 0x15: /* da/5 */
5246
                switch(rm) {
5247
                case 1: /* fucompp */
5248
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5249
                    tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5250
                    tcg_gen_helper_0_0(helper_fpop);
5251
                    tcg_gen_helper_0_0(helper_fpop);
5252
                    break;
5253
                default:
5254
                    goto illegal_op;
5255
                }
5256
                break;
5257
            case 0x1c:
5258
                switch(rm) {
5259
                case 0: /* feni (287 only, just do nop here) */
5260
                    break;
5261
                case 1: /* fdisi (287 only, just do nop here) */
5262
                    break;
5263
                case 2: /* fclex */
5264
                    tcg_gen_helper_0_0(helper_fclex);
5265
                    break;
5266
                case 3: /* fninit */
5267
                    tcg_gen_helper_0_0(helper_fninit);
5268
                    break;
5269
                case 4: /* fsetpm (287 only, just do nop here) */
5270
                    break;
5271
                default:
5272
                    goto illegal_op;
5273
                }
5274
                break;
5275
            case 0x1d: /* fucomi */
5276
                if (s->cc_op != CC_OP_DYNAMIC)
5277
                    gen_op_set_cc_op(s->cc_op);
5278
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5279
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5280
                s->cc_op = CC_OP_EFLAGS;
5281
                break;
5282
            case 0x1e: /* fcomi */
5283
                if (s->cc_op != CC_OP_DYNAMIC)
5284
                    gen_op_set_cc_op(s->cc_op);
5285
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5286
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5287
                s->cc_op = CC_OP_EFLAGS;
5288
                break;
5289
            case 0x28: /* ffree sti */
5290
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5291
                break;
5292
            case 0x2a: /* fst sti */
5293
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5294
                break;
5295
            case 0x2b: /* fstp sti */
5296
            case 0x0b: /* fstp1 sti, undocumented op */
5297
            case 0x3a: /* fstp8 sti, undocumented op */
5298
            case 0x3b: /* fstp9 sti, undocumented op */
5299
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5300
                tcg_gen_helper_0_0(helper_fpop);
5301
                break;
5302
            case 0x2c: /* fucom st(i) */
5303
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5304
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5305
                break;
5306
            case 0x2d: /* fucomp st(i) */
5307
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5308
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5309
                tcg_gen_helper_0_0(helper_fpop);
5310
                break;
5311
            case 0x33: /* de/3 */
5312
                switch(rm) {
5313
                case 1: /* fcompp */
5314
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5315
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5316
                    tcg_gen_helper_0_0(helper_fpop);
5317
                    tcg_gen_helper_0_0(helper_fpop);
5318
                    break;
5319
                default:
5320
                    goto illegal_op;
5321
                }
5322
                break;
5323
            case 0x38: /* ffreep sti, undocumented op */
5324
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5325
                tcg_gen_helper_0_0(helper_fpop);
5326
                break;
5327
            case 0x3c: /* df/4 */
5328
                switch(rm) {
5329
                case 0:
5330
                    tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5331
                    tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5332
                    gen_op_mov_reg_T0(OT_WORD, R_EAX);
5333
                    break;
5334
                default:
5335
                    goto illegal_op;
5336
                }
5337
                break;
5338
            case 0x3d: /* fucomip */
5339
                if (s->cc_op != CC_OP_DYNAMIC)
5340
                    gen_op_set_cc_op(s->cc_op);
5341
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5342
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5343
                tcg_gen_helper_0_0(helper_fpop);
5344
                s->cc_op = CC_OP_EFLAGS;
5345
                break;
5346
            case 0x3e: /* fcomip */
5347
                if (s->cc_op != CC_OP_DYNAMIC)
5348
                    gen_op_set_cc_op(s->cc_op);
5349
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5350
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5351
                tcg_gen_helper_0_0(helper_fpop);
5352
                s->cc_op = CC_OP_EFLAGS;
5353
                break;
5354
            case 0x10 ... 0x13: /* fcmovxx */
5355
            case 0x18 ... 0x1b:
5356
                {
5357
                    int op1, l1;
5358
                    const static uint8_t fcmov_cc[8] = {
5359
                        (JCC_B << 1),
5360
                        (JCC_Z << 1),
5361
                        (JCC_BE << 1),
5362
                        (JCC_P << 1),
5363
                    };
5364
                    op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
5365
                    gen_setcc(s, op1);
5366
                    l1 = gen_new_label();
5367
                    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), l1);
5368
                    tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
5369
                    gen_set_label(l1);
5370
                }
5371
                break;
5372
            default:
5373
                goto illegal_op;
5374
            }
5375
        }
5376
        break;
5377
        /************************/
5378
        /* string ops */
5379

    
5380
    case 0xa4: /* movsS */
5381
    case 0xa5:
5382
        if ((b & 1) == 0)
5383
            ot = OT_BYTE;
5384
        else
5385
            ot = dflag + OT_WORD;
5386

    
5387
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5388
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5389
        } else {
5390
            gen_movs(s, ot);
5391
        }
5392
        break;
5393

    
5394
    case 0xaa: /* stosS */
5395
    case 0xab:
5396
        if ((b & 1) == 0)
5397
            ot = OT_BYTE;
5398
        else
5399
            ot = dflag + OT_WORD;
5400

    
5401
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5402
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5403
        } else {
5404
            gen_stos(s, ot);
5405
        }
5406
        break;
5407
    case 0xac: /* lodsS */
5408
    case 0xad:
5409
        if ((b & 1) == 0)
5410
            ot = OT_BYTE;
5411
        else
5412
            ot = dflag + OT_WORD;
5413
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5414
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5415
        } else {
5416
            gen_lods(s, ot);
5417
        }
5418
        break;
5419
    case 0xae: /* scasS */
5420
    case 0xaf:
5421
        if ((b & 1) == 0)
5422
            ot = OT_BYTE;
5423
        else
5424
            ot = dflag + OT_WORD;
5425
        if (prefixes & PREFIX_REPNZ) {
5426
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5427
        } else if (prefixes & PREFIX_REPZ) {
5428
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5429
        } else {
5430
            gen_scas(s, ot);
5431
            s->cc_op = CC_OP_SUBB + ot;
5432
        }
5433
        break;
5434

    
5435
    case 0xa6: /* cmpsS */
5436
    case 0xa7:
5437
        if ((b & 1) == 0)
5438
            ot = OT_BYTE;
5439
        else
5440
            ot = dflag + OT_WORD;
5441
        if (prefixes & PREFIX_REPNZ) {
5442
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5443
        } else if (prefixes & PREFIX_REPZ) {
5444
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5445
        } else {
5446
            gen_cmps(s, ot);
5447
            s->cc_op = CC_OP_SUBB + ot;
5448
        }
5449
        break;
5450
    case 0x6c: /* insS */
5451
    case 0x6d:
5452
        if ((b & 1) == 0)
5453
            ot = OT_BYTE;
5454
        else
5455
            ot = dflag ? OT_LONG : OT_WORD;
5456
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5457
        gen_op_andl_T0_ffff();
5458
        gen_check_io(s, ot, pc_start - s->cs_base, 
5459
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
5460
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5461
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5462
        } else {
5463
            gen_ins(s, ot);
5464
        }
5465
        break;
5466
    case 0x6e: /* outsS */
5467
    case 0x6f:
5468
        if ((b & 1) == 0)
5469
            ot = OT_BYTE;
5470
        else
5471
            ot = dflag ? OT_LONG : OT_WORD;
5472
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5473
        gen_op_andl_T0_ffff();
5474
        gen_check_io(s, ot, pc_start - s->cs_base,
5475
                     svm_is_rep(prefixes) | 4);
5476
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5477
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5478
        } else {
5479
            gen_outs(s, ot);
5480
        }
5481
        break;
5482

    
5483
        /************************/
5484
        /* port I/O */
5485

    
5486
    case 0xe4:
5487
    case 0xe5:
5488
        if ((b & 1) == 0)
5489
            ot = OT_BYTE;
5490
        else
5491
            ot = dflag ? OT_LONG : OT_WORD;
5492
        val = ldub_code(s->pc++);
5493
        gen_op_movl_T0_im(val);
5494
        gen_check_io(s, ot, pc_start - s->cs_base,
5495
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5496
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5497
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5498
        gen_op_mov_reg_T1(ot, R_EAX);
5499
        break;
5500
    case 0xe6:
5501
    case 0xe7:
5502
        if ((b & 1) == 0)
5503
            ot = OT_BYTE;
5504
        else
5505
            ot = dflag ? OT_LONG : OT_WORD;
5506
        val = ldub_code(s->pc++);
5507
        gen_op_movl_T0_im(val);
5508
        gen_check_io(s, ot, pc_start - s->cs_base,
5509
                     svm_is_rep(prefixes));
5510
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5511

    
5512
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5513
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5514
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5515
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5516
        break;
5517
    case 0xec:
5518
    case 0xed:
5519
        if ((b & 1) == 0)
5520
            ot = OT_BYTE;
5521
        else
5522
            ot = dflag ? OT_LONG : OT_WORD;
5523
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5524
        gen_op_andl_T0_ffff();
5525
        gen_check_io(s, ot, pc_start - s->cs_base,
5526
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5527
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5528
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5529
        gen_op_mov_reg_T1(ot, R_EAX);
5530
        break;
5531
    case 0xee:
5532
    case 0xef:
5533
        if ((b & 1) == 0)
5534
            ot = OT_BYTE;
5535
        else
5536
            ot = dflag ? OT_LONG : OT_WORD;
5537
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5538
        gen_op_andl_T0_ffff();
5539
        gen_check_io(s, ot, pc_start - s->cs_base,
5540
                     svm_is_rep(prefixes));
5541
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5542

    
5543
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5544
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5545
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5546
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5547
        break;
5548

    
5549
        /************************/
5550
        /* control */
5551
    case 0xc2: /* ret im */
5552
        val = ldsw_code(s->pc);
5553
        s->pc += 2;
5554
        gen_pop_T0(s);
5555
        if (CODE64(s) && s->dflag)
5556
            s->dflag = 2;
5557
        gen_stack_update(s, val + (2 << s->dflag));
5558
        if (s->dflag == 0)
5559
            gen_op_andl_T0_ffff();
5560
        gen_op_jmp_T0();
5561
        gen_eob(s);
5562
        break;
5563
    case 0xc3: /* ret */
5564
        gen_pop_T0(s);
5565
        gen_pop_update(s);
5566
        if (s->dflag == 0)
5567
            gen_op_andl_T0_ffff();
5568
        gen_op_jmp_T0();
5569
        gen_eob(s);
5570
        break;
5571
    case 0xca: /* lret im */
5572
        val = ldsw_code(s->pc);
5573
        s->pc += 2;
5574
    do_lret:
5575
        if (s->pe && !s->vm86) {
5576
            if (s->cc_op != CC_OP_DYNAMIC)
5577
                gen_op_set_cc_op(s->cc_op);
5578
            gen_jmp_im(pc_start - s->cs_base);
5579
            tcg_gen_helper_0_2(helper_lret_protected,
5580
                               tcg_const_i32(s->dflag), 
5581
                               tcg_const_i32(val));
5582
        } else {
5583
            gen_stack_A0(s);
5584
            /* pop offset */
5585
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5586
            if (s->dflag == 0)
5587
                gen_op_andl_T0_ffff();
5588
            /* NOTE: keeping EIP updated is not a problem in case of
5589
               exception */
5590
            gen_op_jmp_T0();
5591
            /* pop selector */
5592
            gen_op_addl_A0_im(2 << s->dflag);
5593
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5594
            gen_op_movl_seg_T0_vm(R_CS);
5595
            /* add stack offset */
5596
            gen_stack_update(s, val + (4 << s->dflag));
5597
        }
5598
        gen_eob(s);
5599
        break;
5600
    case 0xcb: /* lret */
5601
        val = 0;
5602
        goto do_lret;
5603
    case 0xcf: /* iret */
5604
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5605
            break;
5606
        if (!s->pe) {
5607
            /* real mode */
5608
            tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5609
            s->cc_op = CC_OP_EFLAGS;
5610
        } else if (s->vm86) {
5611
            if (s->iopl != 3) {
5612
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5613
            } else {
5614
                tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5615
                s->cc_op = CC_OP_EFLAGS;
5616
            }
5617
        } else {
5618
            if (s->cc_op != CC_OP_DYNAMIC)
5619
                gen_op_set_cc_op(s->cc_op);
5620
            gen_jmp_im(pc_start - s->cs_base);
5621
            tcg_gen_helper_0_2(helper_iret_protected,
5622
                               tcg_const_i32(s->dflag), 
5623
                               tcg_const_i32(s->pc - s->cs_base));
5624
            s->cc_op = CC_OP_EFLAGS;
5625
        }
5626
        gen_eob(s);
5627
        break;
5628
    case 0xe8: /* call im */
5629
        {
5630
            if (dflag)
5631
                tval = (int32_t)insn_get(s, OT_LONG);
5632
            else
5633
                tval = (int16_t)insn_get(s, OT_WORD);
5634
            next_eip = s->pc - s->cs_base;
5635
            tval += next_eip;
5636
            if (s->dflag == 0)
5637
                tval &= 0xffff;
5638
            gen_movtl_T0_im(next_eip);
5639
            gen_push_T0(s);
5640
            gen_jmp(s, tval);
5641
        }
5642
        break;
5643
    case 0x9a: /* lcall im */
5644
        {
5645
            unsigned int selector, offset;
5646

    
5647
            if (CODE64(s))
5648
                goto illegal_op;
5649
            ot = dflag ? OT_LONG : OT_WORD;
5650
            offset = insn_get(s, ot);
5651
            selector = insn_get(s, OT_WORD);
5652

    
5653
            gen_op_movl_T0_im(selector);
5654
            gen_op_movl_T1_imu(offset);
5655
        }
5656
        goto do_lcall;
5657
    case 0xe9: /* jmp im */
5658
        if (dflag)
5659
            tval = (int32_t)insn_get(s, OT_LONG);
5660
        else
5661
            tval = (int16_t)insn_get(s, OT_WORD);
5662
        tval += s->pc - s->cs_base;
5663
        if (s->dflag == 0)
5664
            tval &= 0xffff;
5665
        gen_jmp(s, tval);
5666
        break;
5667
    case 0xea: /* ljmp im */
5668
        {
5669
            unsigned int selector, offset;
5670

    
5671
            if (CODE64(s))
5672
                goto illegal_op;
5673
            ot = dflag ? OT_LONG : OT_WORD;
5674
            offset = insn_get(s, ot);
5675
            selector = insn_get(s, OT_WORD);
5676

    
5677
            gen_op_movl_T0_im(selector);
5678
            gen_op_movl_T1_imu(offset);
5679
        }
5680
        goto do_ljmp;
5681
    case 0xeb: /* jmp Jb */
5682
        tval = (int8_t)insn_get(s, OT_BYTE);
5683
        tval += s->pc - s->cs_base;
5684
        if (s->dflag == 0)
5685
            tval &= 0xffff;
5686
        gen_jmp(s, tval);
5687
        break;
5688
    case 0x70 ... 0x7f: /* jcc Jb */
5689
        tval = (int8_t)insn_get(s, OT_BYTE);
5690
        goto do_jcc;
5691
    case 0x180 ... 0x18f: /* jcc Jv */
5692
        if (dflag) {
5693
            tval = (int32_t)insn_get(s, OT_LONG);
5694
        } else {
5695
            tval = (int16_t)insn_get(s, OT_WORD);
5696
        }
5697
    do_jcc:
5698
        next_eip = s->pc - s->cs_base;
5699
        tval += next_eip;
5700
        if (s->dflag == 0)
5701
            tval &= 0xffff;
5702
        gen_jcc(s, b, tval, next_eip);
5703
        break;
5704

    
5705
    case 0x190 ... 0x19f: /* setcc Gv */
5706
        modrm = ldub_code(s->pc++);
5707
        gen_setcc(s, b);
5708
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5709
        break;
5710
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
5711
        ot = dflag + OT_WORD;
5712
        modrm = ldub_code(s->pc++);
5713
        reg = ((modrm >> 3) & 7) | rex_r;
5714
        mod = (modrm >> 6) & 3;
5715
        gen_setcc(s, b);
5716
        if (mod != 3) {
5717
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5718
            gen_op_ld_T1_A0(ot + s->mem_index);
5719
        } else {
5720
            rm = (modrm & 7) | REX_B(s);
5721
            gen_op_mov_TN_reg(ot, 1, rm);
5722
        }
5723
        gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5724
        break;
5725

    
5726
        /************************/
5727
        /* flags */
5728
    case 0x9c: /* pushf */
5729
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5730
            break;
5731
        if (s->vm86 && s->iopl != 3) {
5732
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5733
        } else {
5734
            if (s->cc_op != CC_OP_DYNAMIC)
5735
                gen_op_set_cc_op(s->cc_op);
5736
            tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
5737
            gen_push_T0(s);
5738
        }
5739
        break;
5740
    case 0x9d: /* popf */
5741
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5742
            break;
5743
        if (s->vm86 && s->iopl != 3) {
5744
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5745
        } else {
5746
            gen_pop_T0(s);
5747
            if (s->cpl == 0) {
5748
                if (s->dflag) {
5749
                    tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5750
                                       tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
5751
                } else {
5752
                    tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5753
                                       tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
5754
                }
5755
            } else {
5756
                if (s->cpl <= s->iopl) {
5757
                    if (s->dflag) {
5758
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5759
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
5760
                    } else {
5761
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5762
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
5763
                    }
5764
                } else {
5765
                    if (s->dflag) {
5766
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5767
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
5768
                    } else {
5769
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5770
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
5771
                    }
5772
                }
5773
            }
5774
            gen_pop_update(s);
5775
            s->cc_op = CC_OP_EFLAGS;
5776
            /* abort translation because TF flag may change */
5777
            gen_jmp_im(s->pc - s->cs_base);
5778
            gen_eob(s);
5779
        }
5780
        break;
5781
    case 0x9e: /* sahf */
5782
        if (CODE64(s))
5783
            goto illegal_op;
5784
        gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5785
        if (s->cc_op != CC_OP_DYNAMIC)
5786
            gen_op_set_cc_op(s->cc_op);
5787
        gen_compute_eflags(cpu_cc_src);
5788
        tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
5789
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
5790
        tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
5791
        s->cc_op = CC_OP_EFLAGS;
5792
        break;
5793
    case 0x9f: /* lahf */
5794
        if (CODE64(s))
5795
            goto illegal_op;
5796
        if (s->cc_op != CC_OP_DYNAMIC)
5797
            gen_op_set_cc_op(s->cc_op);
5798
        gen_compute_eflags(cpu_T[0]);
5799
        /* Note: gen_compute_eflags() only gives the condition codes */
5800
        tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
5801
        gen_op_mov_reg_T0(OT_BYTE, R_AH);
5802
        break;
5803
    case 0xf5: /* cmc */
5804
        if (s->cc_op != CC_OP_DYNAMIC)
5805
            gen_op_set_cc_op(s->cc_op);
5806
        gen_compute_eflags(cpu_cc_src);
5807
        tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
5808
        s->cc_op = CC_OP_EFLAGS;
5809
        break;
5810
    case 0xf8: /* clc */
5811
        if (s->cc_op != CC_OP_DYNAMIC)
5812
            gen_op_set_cc_op(s->cc_op);
5813
        gen_compute_eflags(cpu_cc_src);
5814
        tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
5815
        s->cc_op = CC_OP_EFLAGS;
5816
        break;
5817
    case 0xf9: /* stc */
5818
        if (s->cc_op != CC_OP_DYNAMIC)
5819
            gen_op_set_cc_op(s->cc_op);
5820
        gen_compute_eflags(cpu_cc_src);
5821
        tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
5822
        s->cc_op = CC_OP_EFLAGS;
5823
        break;
5824
    case 0xfc: /* cld */
5825
        tcg_gen_movi_i32(cpu_tmp2_i32, 1);
5826
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5827
        break;
5828
    case 0xfd: /* std */
5829
        tcg_gen_movi_i32(cpu_tmp2_i32, -1);
5830
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5831
        break;
5832

    
5833
        /************************/
5834
        /* bit operations */
5835
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
5836
        ot = dflag + OT_WORD;
5837
        modrm = ldub_code(s->pc++);
5838
        op = (modrm >> 3) & 7;
5839
        mod = (modrm >> 6) & 3;
5840
        rm = (modrm & 7) | REX_B(s);
5841
        if (mod != 3) {
5842
            s->rip_offset = 1;
5843
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5844
            gen_op_ld_T0_A0(ot + s->mem_index);
5845
        } else {
5846
            gen_op_mov_TN_reg(ot, 0, rm);
5847
        }
5848
        /* load shift */
5849
        val = ldub_code(s->pc++);
5850
        gen_op_movl_T1_im(val);
5851
        if (op < 4)
5852
            goto illegal_op;
5853
        op -= 4;
5854
        goto bt_op;
5855
    case 0x1a3: /* bt Gv, Ev */
5856
        op = 0;
5857
        goto do_btx;
5858
    case 0x1ab: /* bts */
5859
        op = 1;
5860
        goto do_btx;
5861
    case 0x1b3: /* btr */
5862
        op = 2;
5863
        goto do_btx;
5864
    case 0x1bb: /* btc */
5865
        op = 3;
5866
    do_btx:
5867
        ot = dflag + OT_WORD;
5868
        modrm = ldub_code(s->pc++);
5869
        reg = ((modrm >> 3) & 7) | rex_r;
5870
        mod = (modrm >> 6) & 3;
5871
        rm = (modrm & 7) | REX_B(s);
5872
        gen_op_mov_TN_reg(OT_LONG, 1, reg);
5873
        if (mod != 3) {
5874
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5875
            /* specific case: we need to add a displacement */
5876
            gen_exts(ot, cpu_T[1]);
5877
            tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
5878
            tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
5879
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
5880
            gen_op_ld_T0_A0(ot + s->mem_index);
5881
        } else {
5882
            gen_op_mov_TN_reg(ot, 0, rm);
5883
        }
5884
    bt_op:
5885
        tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
5886
        switch(op) {
5887
        case 0:
5888
            tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
5889
            tcg_gen_movi_tl(cpu_cc_dst, 0);
5890
            break;
5891
        case 1:
5892
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5893
            tcg_gen_movi_tl(cpu_tmp0, 1);
5894
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5895
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5896
            break;
5897
        case 2:
5898
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5899
            tcg_gen_movi_tl(cpu_tmp0, 1);
5900
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5901
            tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
5902
            tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5903
            break;
5904
        default:
5905
        case 3:
5906
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5907
            tcg_gen_movi_tl(cpu_tmp0, 1);
5908
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5909
            tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5910
            break;
5911
        }
5912
        s->cc_op = CC_OP_SARB + ot;
5913
        if (op != 0) {
5914
            if (mod != 3)
5915
                gen_op_st_T0_A0(ot + s->mem_index);
5916
            else
5917
                gen_op_mov_reg_T0(ot, rm);
5918
            tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
5919
            tcg_gen_movi_tl(cpu_cc_dst, 0);
5920
        }
5921
        break;
5922
    case 0x1bc: /* bsf */
5923
    case 0x1bd: /* bsr */
5924
        {
5925
            int label1;
5926
            ot = dflag + OT_WORD;
5927
            modrm = ldub_code(s->pc++);
5928
            reg = ((modrm >> 3) & 7) | rex_r;
5929
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5930
            gen_extu(ot, cpu_T[0]);
5931
            label1 = gen_new_label();
5932
            tcg_gen_movi_tl(cpu_cc_dst, 0);
5933
            tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), label1);
5934
            if (b & 1) {
5935
                tcg_gen_helper_1_1(helper_bsr, cpu_T[0], cpu_T[0]);
5936
            } else {
5937
                tcg_gen_helper_1_1(helper_bsf, cpu_T[0], cpu_T[0]);
5938
            }
5939
            gen_op_mov_reg_T0(ot, reg);
5940
            tcg_gen_movi_tl(cpu_cc_dst, 1);
5941
            gen_set_label(label1);
5942
            tcg_gen_discard_tl(cpu_cc_src);
5943
            s->cc_op = CC_OP_LOGICB + ot;
5944
        }
5945
        break;
5946
        /************************/
5947
        /* bcd */
5948
    case 0x27: /* daa */
5949
        if (CODE64(s))
5950
            goto illegal_op;
5951
        if (s->cc_op != CC_OP_DYNAMIC)
5952
            gen_op_set_cc_op(s->cc_op);
5953
        tcg_gen_helper_0_0(helper_daa);
5954
        s->cc_op = CC_OP_EFLAGS;
5955
        break;
5956
    case 0x2f: /* das */
5957
        if (CODE64(s))
5958
            goto illegal_op;
5959
        if (s->cc_op != CC_OP_DYNAMIC)
5960
            gen_op_set_cc_op(s->cc_op);
5961
        tcg_gen_helper_0_0(helper_das);
5962
        s->cc_op = CC_OP_EFLAGS;
5963
        break;
5964
    case 0x37: /* aaa */
5965
        if (CODE64(s))
5966
            goto illegal_op;
5967
        if (s->cc_op != CC_OP_DYNAMIC)
5968
            gen_op_set_cc_op(s->cc_op);
5969
        tcg_gen_helper_0_0(helper_aaa);
5970
        s->cc_op = CC_OP_EFLAGS;
5971
        break;
5972
    case 0x3f: /* aas */
5973
        if (CODE64(s))
5974
            goto illegal_op;
5975
        if (s->cc_op != CC_OP_DYNAMIC)
5976
            gen_op_set_cc_op(s->cc_op);
5977
        tcg_gen_helper_0_0(helper_aas);
5978
        s->cc_op = CC_OP_EFLAGS;
5979
        break;
5980
    case 0xd4: /* aam */
5981
        if (CODE64(s))
5982
            goto illegal_op;
5983
        val = ldub_code(s->pc++);
5984
        if (val == 0) {
5985
            gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5986
        } else {
5987
            tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
5988
            s->cc_op = CC_OP_LOGICB;
5989
        }
5990
        break;
5991
    case 0xd5: /* aad */
5992
        if (CODE64(s))
5993
            goto illegal_op;
5994
        val = ldub_code(s->pc++);
5995
        tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
5996
        s->cc_op = CC_OP_LOGICB;
5997
        break;
5998
        /************************/
5999
        /* misc */
6000
    case 0x90: /* nop */
6001
        /* XXX: xchg + rex handling */
6002
        /* XXX: correct lock test for all insn */
6003
        if (prefixes & PREFIX_LOCK)
6004
            goto illegal_op;
6005
        if (prefixes & PREFIX_REPZ) {
6006
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
6007
        }
6008
        break;
6009
    case 0x9b: /* fwait */
6010
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
6011
            (HF_MP_MASK | HF_TS_MASK)) {
6012
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6013
        } else {
6014
            if (s->cc_op != CC_OP_DYNAMIC)
6015
                gen_op_set_cc_op(s->cc_op);
6016
            gen_jmp_im(pc_start - s->cs_base);
6017
            tcg_gen_helper_0_0(helper_fwait);
6018
        }
6019
        break;
6020
    case 0xcc: /* int3 */
6021
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
6022
            break;
6023
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
6024
        break;
6025
    case 0xcd: /* int N */
6026
        val = ldub_code(s->pc++);
6027
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
6028
            break;
6029
        if (s->vm86 && s->iopl != 3) {
6030
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6031
        } else {
6032
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
6033
        }
6034
        break;
6035
    case 0xce: /* into */
6036
        if (CODE64(s))
6037
            goto illegal_op;
6038
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
6039
            break;
6040
        if (s->cc_op != CC_OP_DYNAMIC)
6041
            gen_op_set_cc_op(s->cc_op);
6042
        gen_jmp_im(pc_start - s->cs_base);
6043
        tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
6044
        break;
6045
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
6046
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
6047
            break;
6048
#if 1
6049
        gen_debug(s, pc_start - s->cs_base);
6050
#else
6051
        /* start debug */
6052
        tb_flush(cpu_single_env);
6053
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
6054
#endif
6055
        break;
6056
    case 0xfa: /* cli */
6057
        if (!s->vm86) {
6058
            if (s->cpl <= s->iopl) {
6059
                tcg_gen_helper_0_0(helper_cli);
6060
            } else {
6061
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6062
            }
6063
        } else {
6064
            if (s->iopl == 3) {
6065
                tcg_gen_helper_0_0(helper_cli);
6066
            } else {
6067
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6068
            }
6069
        }
6070
        break;
6071
    case 0xfb: /* sti */
6072
        if (!s->vm86) {
6073
            if (s->cpl <= s->iopl) {
6074
            gen_sti:
6075
                tcg_gen_helper_0_0(helper_sti);
6076
                /* interruptions are enabled only the first insn after sti */
6077
                /* If several instructions disable interrupts, only the
6078
                   _first_ does it */
6079
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
6080
                    tcg_gen_helper_0_0(helper_set_inhibit_irq);
6081
                /* give a chance to handle pending irqs */
6082
                gen_jmp_im(s->pc - s->cs_base);
6083
                gen_eob(s);
6084
            } else {
6085
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6086
            }
6087
        } else {
6088
            if (s->iopl == 3) {
6089
                goto gen_sti;
6090
            } else {
6091
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6092
            }
6093
        }
6094
        break;
6095
    case 0x62: /* bound */
6096
        if (CODE64(s))
6097
            goto illegal_op;
6098
        ot = dflag ? OT_LONG : OT_WORD;
6099
        modrm = ldub_code(s->pc++);
6100
        reg = (modrm >> 3) & 7;
6101
        mod = (modrm >> 6) & 3;
6102
        if (mod == 3)
6103
            goto illegal_op;
6104
        gen_op_mov_TN_reg(ot, 0, reg);
6105
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6106
        gen_jmp_im(pc_start - s->cs_base);
6107
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6108
        if (ot == OT_WORD)
6109
            tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
6110
        else
6111
            tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
6112
        break;
6113
    case 0x1c8 ... 0x1cf: /* bswap reg */
6114
        reg = (b & 7) | REX_B(s);
6115
#ifdef TARGET_X86_64
6116
        if (dflag == 2) {
6117
            gen_op_mov_TN_reg(OT_QUAD, 0, reg);
6118
            tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
6119
            gen_op_mov_reg_T0(OT_QUAD, reg);
6120
        } else
6121
        {
6122
            TCGv tmp0;
6123
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
6124
            
6125
            tmp0 = tcg_temp_new(TCG_TYPE_I32);
6126
            tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
6127
            tcg_gen_bswap_i32(tmp0, tmp0);
6128
            tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
6129
            gen_op_mov_reg_T0(OT_LONG, reg);
6130
        }
6131
#else
6132
        {
6133
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
6134
            tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
6135
            gen_op_mov_reg_T0(OT_LONG, reg);
6136
        }
6137
#endif
6138
        break;
6139
    case 0xd6: /* salc */
6140
        if (CODE64(s))
6141
            goto illegal_op;
6142
        if (s->cc_op != CC_OP_DYNAMIC)
6143
            gen_op_set_cc_op(s->cc_op);
6144
        gen_compute_eflags_c(cpu_T[0]);
6145
        tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
6146
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
6147
        break;
6148
    case 0xe0: /* loopnz */
6149
    case 0xe1: /* loopz */
6150
    case 0xe2: /* loop */
6151
    case 0xe3: /* jecxz */
6152
        {
6153
            int l1, l2, l3;
6154

    
6155
            tval = (int8_t)insn_get(s, OT_BYTE);
6156
            next_eip = s->pc - s->cs_base;
6157
            tval += next_eip;
6158
            if (s->dflag == 0)
6159
                tval &= 0xffff;
6160

    
6161
            l1 = gen_new_label();
6162
            l2 = gen_new_label();
6163
            l3 = gen_new_label();
6164
            b &= 3;
6165
            switch(b) {
6166
            case 0: /* loopnz */
6167
            case 1: /* loopz */
6168
                if (s->cc_op != CC_OP_DYNAMIC)
6169
                    gen_op_set_cc_op(s->cc_op);
6170
                gen_op_add_reg_im(s->aflag, R_ECX, -1);
6171
                gen_op_jz_ecx(s->aflag, l3);
6172
                gen_compute_eflags(cpu_tmp0);
6173
                tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
6174
                if (b == 0) {
6175
                    tcg_gen_brcond_tl(TCG_COND_EQ, 
6176
                                      cpu_tmp0, tcg_const_tl(0), l1);
6177
                } else {
6178
                    tcg_gen_brcond_tl(TCG_COND_NE, 
6179
                                      cpu_tmp0, tcg_const_tl(0), l1);
6180
                }
6181
                break;
6182
            case 2: /* loop */
6183
                gen_op_add_reg_im(s->aflag, R_ECX, -1);
6184
                gen_op_jnz_ecx(s->aflag, l1);
6185
                break;
6186
            default:
6187
            case 3: /* jcxz */
6188
                gen_op_jz_ecx(s->aflag, l1);
6189
                break;
6190
            }
6191

    
6192
            gen_set_label(l3);
6193
            gen_jmp_im(next_eip);
6194
            gen_op_jmp_label(l2);
6195

    
6196
            gen_set_label(l1);
6197
            gen_jmp_im(tval);
6198
            gen_set_label(l2);
6199
            gen_eob(s);
6200
        }
6201
        break;
6202
    case 0x130: /* wrmsr */
6203
    case 0x132: /* rdmsr */
6204
        if (s->cpl != 0) {
6205
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6206
        } else {
6207
            int retval = 0;
6208
            if (b & 2) {
6209
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
6210
                tcg_gen_helper_0_0(helper_rdmsr);
6211
            } else {
6212
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
6213
                tcg_gen_helper_0_0(helper_wrmsr);
6214
            }
6215
            if(retval)
6216
                gen_eob(s);
6217
        }
6218
        break;
6219
    case 0x131: /* rdtsc */
6220
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
6221
            break;
6222
        gen_jmp_im(pc_start - s->cs_base);
6223
        tcg_gen_helper_0_0(helper_rdtsc);
6224
        break;
6225
    case 0x133: /* rdpmc */
6226
        gen_jmp_im(pc_start - s->cs_base);
6227
        tcg_gen_helper_0_0(helper_rdpmc);
6228
        break;
6229
    case 0x134: /* sysenter */
6230
        if (CODE64(s))
6231
            goto illegal_op;
6232
        if (!s->pe) {
6233
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6234
        } else {
6235
            if (s->cc_op != CC_OP_DYNAMIC) {
6236
                gen_op_set_cc_op(s->cc_op);
6237
                s->cc_op = CC_OP_DYNAMIC;
6238
            }
6239
            gen_jmp_im(pc_start - s->cs_base);
6240
            tcg_gen_helper_0_0(helper_sysenter);
6241
            gen_eob(s);
6242
        }
6243
        break;
6244
    case 0x135: /* sysexit */
6245
        if (CODE64(s))
6246
            goto illegal_op;
6247
        if (!s->pe) {
6248
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6249
        } else {
6250
            if (s->cc_op != CC_OP_DYNAMIC) {
6251
                gen_op_set_cc_op(s->cc_op);
6252
                s->cc_op = CC_OP_DYNAMIC;
6253
            }
6254
            gen_jmp_im(pc_start - s->cs_base);
6255
            tcg_gen_helper_0_0(helper_sysexit);
6256
            gen_eob(s);
6257
        }
6258
        break;
6259
#ifdef TARGET_X86_64
6260
    case 0x105: /* syscall */
6261
        /* XXX: is it usable in real mode ? */
6262
        if (s->cc_op != CC_OP_DYNAMIC) {
6263
            gen_op_set_cc_op(s->cc_op);
6264
            s->cc_op = CC_OP_DYNAMIC;
6265
        }
6266
        gen_jmp_im(pc_start - s->cs_base);
6267
        tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
6268
        gen_eob(s);
6269
        break;
6270
    case 0x107: /* sysret */
6271
        if (!s->pe) {
6272
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6273
        } else {
6274
            if (s->cc_op != CC_OP_DYNAMIC) {
6275
                gen_op_set_cc_op(s->cc_op);
6276
                s->cc_op = CC_OP_DYNAMIC;
6277
            }
6278
            gen_jmp_im(pc_start - s->cs_base);
6279
            tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
6280
            /* condition codes are modified only in long mode */
6281
            if (s->lma)
6282
                s->cc_op = CC_OP_EFLAGS;
6283
            gen_eob(s);
6284
        }
6285
        break;
6286
#endif
6287
    case 0x1a2: /* cpuid */
6288
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
6289
            break;
6290
        tcg_gen_helper_0_0(helper_cpuid);
6291
        break;
6292
    case 0xf4: /* hlt */
6293
        if (s->cpl != 0) {
6294
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6295
        } else {
6296
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
6297
                break;
6298
            if (s->cc_op != CC_OP_DYNAMIC)
6299
                gen_op_set_cc_op(s->cc_op);
6300
            gen_jmp_im(s->pc - s->cs_base);
6301
            tcg_gen_helper_0_0(helper_hlt);
6302
            s->is_jmp = 3;
6303
        }
6304
        break;
6305
    case 0x100:
6306
        modrm = ldub_code(s->pc++);
6307
        mod = (modrm >> 6) & 3;
6308
        op = (modrm >> 3) & 7;
6309
        switch(op) {
6310
        case 0: /* sldt */
6311
            if (!s->pe || s->vm86)
6312
                goto illegal_op;
6313
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
6314
                break;
6315
            gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
6316
            ot = OT_WORD;
6317
            if (mod == 3)
6318
                ot += s->dflag;
6319
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6320
            break;
6321
        case 2: /* lldt */
6322
            if (!s->pe || s->vm86)
6323
                goto illegal_op;
6324
            if (s->cpl != 0) {
6325
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6326
            } else {
6327
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
6328
                    break;
6329
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6330
                gen_jmp_im(pc_start - s->cs_base);
6331
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6332
                tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
6333
            }
6334
            break;
6335
        case 1: /* str */
6336
            if (!s->pe || s->vm86)
6337
                goto illegal_op;
6338
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
6339
                break;
6340
            gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
6341
            ot = OT_WORD;
6342
            if (mod == 3)
6343
                ot += s->dflag;
6344
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6345
            break;
6346
        case 3: /* ltr */
6347
            if (!s->pe || s->vm86)
6348
                goto illegal_op;
6349
            if (s->cpl != 0) {
6350
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6351
            } else {
6352
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
6353
                    break;
6354
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6355
                gen_jmp_im(pc_start - s->cs_base);
6356
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6357
                tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
6358
            }
6359
            break;
6360
        case 4: /* verr */
6361
        case 5: /* verw */
6362
            if (!s->pe || s->vm86)
6363
                goto illegal_op;
6364
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6365
            if (s->cc_op != CC_OP_DYNAMIC)
6366
                gen_op_set_cc_op(s->cc_op);
6367
            if (op == 4)
6368
                tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
6369
            else
6370
                tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
6371
            s->cc_op = CC_OP_EFLAGS;
6372
            break;
6373
        default:
6374
            goto illegal_op;
6375
        }
6376
        break;
6377
    case 0x101:
6378
        modrm = ldub_code(s->pc++);
6379
        mod = (modrm >> 6) & 3;
6380
        op = (modrm >> 3) & 7;
6381
        rm = modrm & 7;
6382
        switch(op) {
6383
        case 0: /* sgdt */
6384
            if (mod == 3)
6385
                goto illegal_op;
6386
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
6387
                break;
6388
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6389
            gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
6390
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
6391
            gen_add_A0_im(s, 2);
6392
            gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
6393
            if (!s->dflag)
6394
                gen_op_andl_T0_im(0xffffff);
6395
            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6396
            break;
6397
        case 1:
6398
            if (mod == 3) {
6399
                switch (rm) {
6400
                case 0: /* monitor */
6401
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6402
                        s->cpl != 0)
6403
                        goto illegal_op;
6404
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
6405
                        break;
6406
                    gen_jmp_im(pc_start - s->cs_base);
6407
#ifdef TARGET_X86_64
6408
                    if (s->aflag == 2) {
6409
                        gen_op_movq_A0_reg(R_EAX);
6410
                    } else
6411
#endif
6412
                    {
6413
                        gen_op_movl_A0_reg(R_EAX);
6414
                        if (s->aflag == 0)
6415
                            gen_op_andl_A0_ffff();
6416
                    }
6417
                    gen_add_A0_ds_seg(s);
6418
                    tcg_gen_helper_0_1(helper_monitor, cpu_A0);
6419
                    break;
6420
                case 1: /* mwait */
6421
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6422
                        s->cpl != 0)
6423
                        goto illegal_op;
6424
                    if (s->cc_op != CC_OP_DYNAMIC) {
6425
                        gen_op_set_cc_op(s->cc_op);
6426
                        s->cc_op = CC_OP_DYNAMIC;
6427
                    }
6428
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
6429
                        break;
6430
                    gen_jmp_im(s->pc - s->cs_base);
6431
                    tcg_gen_helper_0_0(helper_mwait);
6432
                    gen_eob(s);
6433
                    break;
6434
                default:
6435
                    goto illegal_op;
6436
                }
6437
            } else { /* sidt */
6438
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
6439
                    break;
6440
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6441
                gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
6442
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
6443
                gen_add_A0_im(s, 2);
6444
                gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
6445
                if (!s->dflag)
6446
                    gen_op_andl_T0_im(0xffffff);
6447
                gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6448
            }
6449
            break;
6450
        case 2: /* lgdt */
6451
        case 3: /* lidt */
6452
            if (mod == 3) {
6453
                switch(rm) {
6454
                case 0: /* VMRUN */
6455
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
6456
                        break;
6457
                    if (s->cc_op != CC_OP_DYNAMIC)
6458
                        gen_op_set_cc_op(s->cc_op);
6459
                    gen_jmp_im(s->pc - s->cs_base);
6460
                    tcg_gen_helper_0_0(helper_vmrun);
6461
                    s->cc_op = CC_OP_EFLAGS;
6462
                    gen_eob(s);
6463
                    break;
6464
                case 1: /* VMMCALL */
6465
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
6466
                         break;
6467
                    /* FIXME: cause #UD if hflags & SVM */
6468
                    tcg_gen_helper_0_0(helper_vmmcall);
6469
                    break;
6470
                case 2: /* VMLOAD */
6471
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
6472
                         break;
6473
                    tcg_gen_helper_0_0(helper_vmload);
6474
                    break;
6475
                case 3: /* VMSAVE */
6476
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
6477
                         break;
6478
                    tcg_gen_helper_0_0(helper_vmsave);
6479
                    break;
6480
                case 4: /* STGI */
6481
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
6482
                         break;
6483
                    tcg_gen_helper_0_0(helper_stgi);
6484
                    break;
6485
                case 5: /* CLGI */
6486
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
6487
                         break;
6488
                    tcg_gen_helper_0_0(helper_clgi);
6489
                    break;
6490
                case 6: /* SKINIT */
6491
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
6492
                         break;
6493
                    tcg_gen_helper_0_0(helper_skinit);
6494
                    break;
6495
                case 7: /* INVLPGA */
6496
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
6497
                         break;
6498
                    tcg_gen_helper_0_0(helper_invlpga);
6499
                    break;
6500
                default:
6501
                    goto illegal_op;
6502
                }
6503
            } else if (s->cpl != 0) {
6504
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6505
            } else {
6506
                if (gen_svm_check_intercept(s, pc_start,
6507
                                            op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
6508
                    break;
6509
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6510
                gen_op_ld_T1_A0(OT_WORD + s->mem_index);
6511
                gen_add_A0_im(s, 2);
6512
                gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6513
                if (!s->dflag)
6514
                    gen_op_andl_T0_im(0xffffff);
6515
                if (op == 2) {
6516
                    gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
6517
                    gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
6518
                } else {
6519
                    gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
6520
                    gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
6521
                }
6522
            }
6523
            break;
6524
        case 4: /* smsw */
6525
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
6526
                break;
6527
            gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
6528
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6529
            break;
6530
        case 6: /* lmsw */
6531
            if (s->cpl != 0) {
6532
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6533
            } else {
6534
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
6535
                    break;
6536
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6537
                tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
6538
                gen_jmp_im(s->pc - s->cs_base);
6539
                gen_eob(s);
6540
            }
6541
            break;
6542
        case 7: /* invlpg */
6543
            if (s->cpl != 0) {
6544
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6545
            } else {
6546
                if (mod == 3) {
6547
#ifdef TARGET_X86_64
6548
                    if (CODE64(s) && rm == 0) {
6549
                        /* swapgs */
6550
                        gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
6551
                        gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
6552
                        gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
6553
                        gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
6554
                    } else
6555
#endif
6556
                    {
6557
                        goto illegal_op;
6558
                    }
6559
                } else {
6560
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
6561
                        break;
6562
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6563
                    tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
6564
                    gen_jmp_im(s->pc - s->cs_base);
6565
                    gen_eob(s);
6566
                }
6567
            }
6568
            break;
6569
        default:
6570
            goto illegal_op;
6571
        }
6572
        break;
6573
    case 0x108: /* invd */
6574
    case 0x109: /* wbinvd */
6575
        if (s->cpl != 0) {
6576
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6577
        } else {
6578
            if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6579
                break;
6580
            /* nothing to do */
6581
        }
6582
        break;
6583
    case 0x63: /* arpl or movslS (x86_64) */
6584
#ifdef TARGET_X86_64
6585
        if (CODE64(s)) {
6586
            int d_ot;
6587
            /* d_ot is the size of destination */
6588
            d_ot = dflag + OT_WORD;
6589

    
6590
            modrm = ldub_code(s->pc++);
6591
            reg = ((modrm >> 3) & 7) | rex_r;
6592
            mod = (modrm >> 6) & 3;
6593
            rm = (modrm & 7) | REX_B(s);
6594

    
6595
            if (mod == 3) {
6596
                gen_op_mov_TN_reg(OT_LONG, 0, rm);
6597
                /* sign extend */
6598
                if (d_ot == OT_QUAD)
6599
                    tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
6600
                gen_op_mov_reg_T0(d_ot, reg);
6601
            } else {
6602
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6603
                if (d_ot == OT_QUAD) {
6604
                    gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6605
                } else {
6606
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6607
                }
6608
                gen_op_mov_reg_T0(d_ot, reg);
6609
            }
6610
        } else
6611
#endif
6612
        {
6613
            int label1;
6614
            if (!s->pe || s->vm86)
6615
                goto illegal_op;
6616
            ot = OT_WORD;
6617
            modrm = ldub_code(s->pc++);
6618
            reg = (modrm >> 3) & 7;
6619
            mod = (modrm >> 6) & 3;
6620
            rm = modrm & 7;
6621
            if (mod != 3) {
6622
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6623
                gen_op_ld_T0_A0(ot + s->mem_index);
6624
            } else {
6625
                gen_op_mov_TN_reg(ot, 0, rm);
6626
            }
6627
            gen_op_mov_TN_reg(ot, 1, reg);
6628
            tcg_gen_andi_tl(cpu_tmp0, cpu_T[0], 3);
6629
            tcg_gen_andi_tl(cpu_T[1], cpu_T[1], 3);
6630
            tcg_gen_movi_tl(cpu_T3, 0);
6631
            label1 = gen_new_label();
6632
            tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, cpu_T[1], label1);
6633
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], ~3);
6634
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
6635
            tcg_gen_movi_tl(cpu_T3, CC_Z);
6636
            gen_set_label(label1);
6637
            if (mod != 3) {
6638
                gen_op_st_T0_A0(ot + s->mem_index);
6639
            } else {
6640
                gen_op_mov_reg_T0(ot, rm);
6641
            }
6642
            if (s->cc_op != CC_OP_DYNAMIC)
6643
                gen_op_set_cc_op(s->cc_op);
6644
            gen_compute_eflags(cpu_cc_src);
6645
            tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
6646
            tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T3);
6647
            s->cc_op = CC_OP_EFLAGS;
6648
        }
6649
        break;
6650
    case 0x102: /* lar */
6651
    case 0x103: /* lsl */
6652
        {
6653
            int label1;
6654
            if (!s->pe || s->vm86)
6655
                goto illegal_op;
6656
            ot = dflag ? OT_LONG : OT_WORD;
6657
            modrm = ldub_code(s->pc++);
6658
            reg = ((modrm >> 3) & 7) | rex_r;
6659
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6660
            if (s->cc_op != CC_OP_DYNAMIC)
6661
                gen_op_set_cc_op(s->cc_op);
6662
            if (b == 0x102)
6663
                tcg_gen_helper_1_1(helper_lar, cpu_T[0], cpu_T[0]);
6664
            else
6665
                tcg_gen_helper_1_1(helper_lsl, cpu_T[0], cpu_T[0]);
6666
            tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
6667
            label1 = gen_new_label();
6668
            tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), label1);
6669
            gen_op_mov_reg_T0(ot, reg);
6670
            gen_set_label(label1);
6671
            s->cc_op = CC_OP_EFLAGS;
6672
        }
6673
        break;
6674
    case 0x118:
6675
        modrm = ldub_code(s->pc++);
6676
        mod = (modrm >> 6) & 3;
6677
        op = (modrm >> 3) & 7;
6678
        switch(op) {
6679
        case 0: /* prefetchnta */
6680
        case 1: /* prefetchnt0 */
6681
        case 2: /* prefetchnt0 */
6682
        case 3: /* prefetchnt0 */
6683
            if (mod == 3)
6684
                goto illegal_op;
6685
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6686
            /* nothing more to do */
6687
            break;
6688
        default: /* nop (multi byte) */
6689
            gen_nop_modrm(s, modrm);
6690
            break;
6691
        }
6692
        break;
6693
    case 0x119 ... 0x11f: /* nop (multi byte) */
6694
        modrm = ldub_code(s->pc++);
6695
        gen_nop_modrm(s, modrm);
6696
        break;
6697
    case 0x120: /* mov reg, crN */
6698
    case 0x122: /* mov crN, reg */
6699
        if (s->cpl != 0) {
6700
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6701
        } else {
6702
            modrm = ldub_code(s->pc++);
6703
            if ((modrm & 0xc0) != 0xc0)
6704
                goto illegal_op;
6705
            rm = (modrm & 7) | REX_B(s);
6706
            reg = ((modrm >> 3) & 7) | rex_r;
6707
            if (CODE64(s))
6708
                ot = OT_QUAD;
6709
            else
6710
                ot = OT_LONG;
6711
            switch(reg) {
6712
            case 0:
6713
            case 2:
6714
            case 3:
6715
            case 4:
6716
            case 8:
6717
                if (b & 2) {
6718
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6719
                    gen_op_mov_TN_reg(ot, 0, rm);
6720
                    tcg_gen_helper_0_2(helper_movl_crN_T0, 
6721
                                       tcg_const_i32(reg), cpu_T[0]);
6722
                    gen_jmp_im(s->pc - s->cs_base);
6723
                    gen_eob(s);
6724
                } else {
6725
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6726
#if !defined(CONFIG_USER_ONLY)
6727
                    if (reg == 8)
6728
                        tcg_gen_helper_1_0(helper_movtl_T0_cr8, cpu_T[0]);
6729
                    else
6730
#endif
6731
                        gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6732
                    gen_op_mov_reg_T0(ot, rm);
6733
                }
6734
                break;
6735
            default:
6736
                goto illegal_op;
6737
            }
6738
        }
6739
        break;
6740
    case 0x121: /* mov reg, drN */
6741
    case 0x123: /* mov drN, reg */
6742
        if (s->cpl != 0) {
6743
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6744
        } else {
6745
            modrm = ldub_code(s->pc++);
6746
            if ((modrm & 0xc0) != 0xc0)
6747
                goto illegal_op;
6748
            rm = (modrm & 7) | REX_B(s);
6749
            reg = ((modrm >> 3) & 7) | rex_r;
6750
            if (CODE64(s))
6751
                ot = OT_QUAD;
6752
            else
6753
                ot = OT_LONG;
6754
            /* XXX: do it dynamically with CR4.DE bit */
6755
            if (reg == 4 || reg == 5 || reg >= 8)
6756
                goto illegal_op;
6757
            if (b & 2) {
6758
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6759
                gen_op_mov_TN_reg(ot, 0, rm);
6760
                tcg_gen_helper_0_2(helper_movl_drN_T0,
6761
                                   tcg_const_i32(reg), cpu_T[0]);
6762
                gen_jmp_im(s->pc - s->cs_base);
6763
                gen_eob(s);
6764
            } else {
6765
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6766
                gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6767
                gen_op_mov_reg_T0(ot, rm);
6768
            }
6769
        }
6770
        break;
6771
    case 0x106: /* clts */
6772
        if (s->cpl != 0) {
6773
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6774
        } else {
6775
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6776
            tcg_gen_helper_0_0(helper_clts);
6777
            /* abort block because static cpu state changed */
6778
            gen_jmp_im(s->pc - s->cs_base);
6779
            gen_eob(s);
6780
        }
6781
        break;
6782
    /* MMX/3DNow!/SSE/SSE2/SSE3 support */
6783
    case 0x1c3: /* MOVNTI reg, mem */
6784
        if (!(s->cpuid_features & CPUID_SSE2))
6785
            goto illegal_op;
6786
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6787
        modrm = ldub_code(s->pc++);
6788
        mod = (modrm >> 6) & 3;
6789
        if (mod == 3)
6790
            goto illegal_op;
6791
        reg = ((modrm >> 3) & 7) | rex_r;
6792
        /* generate a generic store */
6793
        gen_ldst_modrm(s, modrm, ot, reg, 1);
6794
        break;
6795
    case 0x1ae:
6796
        modrm = ldub_code(s->pc++);
6797
        mod = (modrm >> 6) & 3;
6798
        op = (modrm >> 3) & 7;
6799
        switch(op) {
6800
        case 0: /* fxsave */
6801
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6802
                (s->flags & HF_EM_MASK))
6803
                goto illegal_op;
6804
            if (s->flags & HF_TS_MASK) {
6805
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6806
                break;
6807
            }
6808
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6809
            if (s->cc_op != CC_OP_DYNAMIC)
6810
                gen_op_set_cc_op(s->cc_op);
6811
            gen_jmp_im(pc_start - s->cs_base);
6812
            tcg_gen_helper_0_2(helper_fxsave, 
6813
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
6814
            break;
6815
        case 1: /* fxrstor */
6816
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6817
                (s->flags & HF_EM_MASK))
6818
                goto illegal_op;
6819
            if (s->flags & HF_TS_MASK) {
6820
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6821
                break;
6822
            }
6823
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6824
            if (s->cc_op != CC_OP_DYNAMIC)
6825
                gen_op_set_cc_op(s->cc_op);
6826
            gen_jmp_im(pc_start - s->cs_base);
6827
            tcg_gen_helper_0_2(helper_fxrstor,
6828
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
6829
            break;
6830
        case 2: /* ldmxcsr */
6831
        case 3: /* stmxcsr */
6832
            if (s->flags & HF_TS_MASK) {
6833
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6834
                break;
6835
            }
6836
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6837
                mod == 3)
6838
                goto illegal_op;
6839
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6840
            if (op == 2) {
6841
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6842
                gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6843
            } else {
6844
                gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6845
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
6846
            }
6847
            break;
6848
        case 5: /* lfence */
6849
        case 6: /* mfence */
6850
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6851
                goto illegal_op;
6852
            break;
6853
        case 7: /* sfence / clflush */
6854
            if ((modrm & 0xc7) == 0xc0) {
6855
                /* sfence */
6856
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
6857
                if (!(s->cpuid_features & CPUID_SSE))
6858
                    goto illegal_op;
6859
            } else {
6860
                /* clflush */
6861
                if (!(s->cpuid_features & CPUID_CLFLUSH))
6862
                    goto illegal_op;
6863
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6864
            }
6865
            break;
6866
        default:
6867
            goto illegal_op;
6868
        }
6869
        break;
6870
    case 0x10d: /* 3DNow! prefetch(w) */
6871
        modrm = ldub_code(s->pc++);
6872
        mod = (modrm >> 6) & 3;
6873
        if (mod == 3)
6874
            goto illegal_op;
6875
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6876
        /* ignore for now */
6877
        break;
6878
    case 0x1aa: /* rsm */
6879
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
6880
            break;
6881
        if (!(s->flags & HF_SMM_MASK))
6882
            goto illegal_op;
6883
        if (s->cc_op != CC_OP_DYNAMIC) {
6884
            gen_op_set_cc_op(s->cc_op);
6885
            s->cc_op = CC_OP_DYNAMIC;
6886
        }
6887
        gen_jmp_im(s->pc - s->cs_base);
6888
        tcg_gen_helper_0_0(helper_rsm);
6889
        gen_eob(s);
6890
        break;
6891
    case 0x10e ... 0x10f:
6892
        /* 3DNow! instructions, ignore prefixes */
6893
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
6894
    case 0x110 ... 0x117:
6895
    case 0x128 ... 0x12f:
6896
    case 0x150 ... 0x177:
6897
    case 0x17c ... 0x17f:
6898
    case 0x1c2:
6899
    case 0x1c4 ... 0x1c6:
6900
    case 0x1d0 ... 0x1fe:
6901
        gen_sse(s, b, pc_start, rex_r);
6902
        break;
6903
    default:
6904
        goto illegal_op;
6905
    }
6906
    /* lock generation */
6907
    if (s->prefix & PREFIX_LOCK)
6908
        tcg_gen_helper_0_0(helper_unlock);
6909
    return s->pc;
6910
 illegal_op:
6911
    if (s->prefix & PREFIX_LOCK)
6912
        tcg_gen_helper_0_0(helper_unlock);
6913
    /* XXX: ensure that no lock was generated */
6914
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6915
    return s->pc;
6916
}
6917

    
6918
/* Expand a TCG macro opcode into real ops.  Registered with the TCG
   context by optimize_flags_init().  With MACRO_TEST undefined (the
   default — see the commented-out define at the top of the file) the
   switch has no live cases and this is a no-op. */
static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
{
    switch(macro_id) {
#ifdef MACRO_TEST
    case MACRO_TEST:
        /* test expansion: emit a call to the 32-bit divide helper on T0 */
        tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
        break;
#endif
    }
}
6928

    
6929
/* One-time initialization of the x86 translator's TCG state: registers
   the macro expansion callback and declares the global TCG values
   (env pointer, T0/T1/A0/T3 working values, lazy condition-code state)
   referenced by all generated code.  Must run before any translation. */
void optimize_flags_init(void)
{
    /* sanity check: CCTable must keep a power-of-two size — presumably
       so it can be indexed with a shift elsewhere; TODO confirm */
#if TCG_TARGET_REG_BITS == 32
    assert(sizeof(CCTable) == (1 << 3));
#else
    assert(sizeof(CCTable) == (1 << 4));
#endif
    tcg_set_macro_func(&tcg_ctx, tcg_macro_func);

    /* env always lives in the fixed host register TCG_AREG0 */
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
#if TARGET_LONG_BITS > HOST_LONG_BITS
    /* a target word does not fit in one host register: back T0/T1/A0
       with CPUState memory slots instead of fixed host registers */
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL, 
                                  TCG_AREG0, offsetof(CPUState, t0), "T0");
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
                                  TCG_AREG0, offsetof(CPUState, t1), "T1");
    cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
                                TCG_AREG0, offsetof(CPUState, t2), "A0");
#else
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
    cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
#endif
    /* T3 is always memory-backed */
    cpu_T3 = tcg_global_mem_new(TCG_TYPE_TL,
                                TCG_AREG0, offsetof(CPUState, t3), "T3");
#if defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS)
    /* XXX: must be suppressed once there are less fixed registers */
    /* on 32-bit x86 hosts a 64-bit temp is built from the AREG1/AREG2
       register pair (they are free here: T0/T1 are in registers only
       when target words fit, and this branch is the complement) */
    cpu_tmp1_i64 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");
#endif
    /* lazy condition-code evaluation state, kept in CPUState */
    cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
                                   TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
    cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
    cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
}
6964

    
6965
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;            /* guest PC of the next insn to translate */
    uint16_t *gen_opc_end;          /* high-water mark of the opcode buffer */
    int j, lj, cflags;
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* unpack the static CPU state flags the translator depends on */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    /* condition codes start unknown; disas_insn() tracks them lazily */
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    /* mem_index encodes the softmmu access mode: 0 = direct (user),
       1*4 = kernel, 2*4 = user under softmmu */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    /* direct block chaining is only allowed when no per-insn
       interruption (trap flag, single step, inhibited irqs) applies */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    /* allocate the per-translation TCG temporaries used by disas_insn() */
    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
#if !(defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS))
    /* on other hosts tmp1 is a plain temp; on 32-bit x86 hosts it is a
       fixed register pair set up in optimize_flags_init() */
    cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
#endif
    cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
    cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
    cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;                        /* last filled index in gen_opc_* tables */

    for(;;) {
        /* emit a debug exception at any breakpoint on this insn */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* record, for each emitted opcode slot, which guest insn
               starts there; gaps (opcodes mid-insn) are zero-filled */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           change to be happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        /* the 32-byte margin below TARGET_PAGE_SIZE leaves room for the
           longest possible x86 instruction at the end of the block */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP_OPT) {
            fprintf(logfile, "OP before opt:\n");
            tcg_dump_ops(&tcg_ctx, logfile);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* in search_pc mode the TB is being re-translated for fault
       recovery, so its recorded size must not be touched */
    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
7127

    
7128
/* Translate basic block 'tb' into TCG ops, without recording
   per-opcode PC information.  Always returns 0. */
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    const int search_pc = 0;

    return gen_intermediate_code_internal(env, tb, search_pc);
}
7132

    
7133
/* Re-translate basic block 'tb', additionally recording per-opcode PC
   information (used to restore CPU state after a fault).
   Always returns 0. */
int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    const int search_pc = 1;

    return gen_intermediate_code_internal(env, tb, search_pc);
}
7137

    
7138
/* Restore the CPU state saved for the opcode at index 'pc_pos' of the
   re-translated TB: sets env->eip from the recorded guest PC, and
   env->cc_op when the condition-code op was statically known there. */
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    int cc_op;

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP) {
        int idx;

        fprintf(logfile, "RESTORE:\n");
        /* dump every opcode slot up to pc_pos that starts a guest insn */
        for (idx = 0; idx <= pc_pos; idx++) {
            if (gen_opc_instr_start[idx])
                fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", idx, gen_opc_pc[idx]);
        }
        fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                (uint32_t)tb->cs_base);
    }
#endif

    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
    cc_op = gen_opc_cc_op[pc_pos];
    if (cc_op != CC_OP_DYNAMIC)
        env->cc_op = cc_op;
}