Statistics
| Branch: | Revision:

root / target-i386 / translate.c @ 6191b059

History | View | Annotate | Download (220.1 kB)

1
/*
2
 *  i386 translation
3
 *
4
 *  Copyright (c) 2003 Fabrice Bellard
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, write to the Free Software
18
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
 */
20
#include <stdarg.h>
21
#include <stdlib.h>
22
#include <stdio.h>
23
#include <string.h>
24
#include <inttypes.h>
25
#include <signal.h>
26
#include <assert.h>
27

    
28
#include "cpu.h"
29
#include "exec-all.h"
30
#include "disas.h"
31
#include "helper.h"
32
#include "tcg-op.h"
33

    
34
#define PREFIX_REPZ   0x01
35
#define PREFIX_REPNZ  0x02
36
#define PREFIX_LOCK   0x04
37
#define PREFIX_DATA   0x08
38
#define PREFIX_ADR    0x10
39

    
40
#ifdef TARGET_X86_64
41
#define X86_64_ONLY(x) x
42
#define X86_64_DEF(x...) x
43
#define CODE64(s) ((s)->code64)
44
#define REX_X(s) ((s)->rex_x)
45
#define REX_B(s) ((s)->rex_b)
46
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
47
#if 1
48
#define BUGGY_64(x) NULL
49
#endif
50
#else
51
#define X86_64_ONLY(x) NULL
52
#define X86_64_DEF(x...)
53
#define CODE64(s) 0
54
#define REX_X(s) 0
55
#define REX_B(s) 0
56
#endif
57

    
58
//#define MACRO_TEST   1
59

    
60
/* global register indexes */
61
static TCGv cpu_env, cpu_T[2], cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst;
62
static TCGv cpu_T3;
63
/* local register indexes (only used inside old micro ops) */
64
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
65
static TCGv cpu_tmp5, cpu_tmp6;
66

    
67
#ifdef TARGET_X86_64
68
static int x86_64_hregs;
69
#endif
70

    
71
/* Decoder state for translating one x86 TranslationBlock.
   "current insn" fields are updated per instruction; "current block"
   fields are fixed for the lifetime of the TB. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* segment override register index; -1 if no override */
    int prefix;   /* PREFIX_* bit mask accumulated for this insn */
    int aflag, dflag; /* address-size / operand-size flags */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* REX.X / REX.B prefix bits (extend index/base regs) */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;    /* current privilege level */
    int iopl;   /* I/O privilege level */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;      /* CPUID feature bits of the emulated CPU */
    int cpuid_ext_features;
    int cpuid_ext2_features;
} DisasContext;
107

    
108
static void gen_eob(DisasContext *s);
109
static void gen_jmp(DisasContext *s, target_ulong eip);
110
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
111

    
112
/* i386 arith/logic operations */
113
enum {
114
    OP_ADDL,
115
    OP_ORL,
116
    OP_ADCL,
117
    OP_SBBL,
118
    OP_ANDL,
119
    OP_SUBL,
120
    OP_XORL,
121
    OP_CMPL,
122
};
123

    
124
/* i386 shift ops */
125
enum {
126
    OP_ROL,
127
    OP_ROR,
128
    OP_RCL,
129
    OP_RCR,
130
    OP_SHL,
131
    OP_SHR,
132
    OP_SHL1, /* undocumented */
133
    OP_SAR = 7,
134
};
135

    
136
/* operand size */
137
enum {
138
    OT_BYTE = 0,
139
    OT_WORD,
140
    OT_LONG,
141
    OT_QUAD,
142
};
143

    
144
enum {
145
    /* I386 int registers */
146
    OR_EAX,   /* MUST be even numbered */
147
    OR_ECX,
148
    OR_EDX,
149
    OR_EBX,
150
    OR_ESP,
151
    OR_EBP,
152
    OR_ESI,
153
    OR_EDI,
154

    
155
    OR_TMP0 = 16,    /* temporary operand register */
156
    OR_TMP1,
157
    OR_A0, /* temporary register used when doing address evaluation */
158
};
159

    
160
static inline void gen_op_movl_T0_0(void)
161
{
162
    tcg_gen_movi_tl(cpu_T[0], 0);
163
}
164

    
165
static inline void gen_op_movl_T0_im(int32_t val)
166
{
167
    tcg_gen_movi_tl(cpu_T[0], val);
168
}
169

    
170
static inline void gen_op_movl_T0_imu(uint32_t val)
171
{
172
    tcg_gen_movi_tl(cpu_T[0], val);
173
}
174

    
175
static inline void gen_op_movl_T1_im(int32_t val)
176
{
177
    tcg_gen_movi_tl(cpu_T[1], val);
178
}
179

    
180
static inline void gen_op_movl_T1_imu(uint32_t val)
181
{
182
    tcg_gen_movi_tl(cpu_T[1], val);
183
}
184

    
185
static inline void gen_op_movl_A0_im(uint32_t val)
186
{
187
    tcg_gen_movi_tl(cpu_A0, val);
188
}
189

    
190
#ifdef TARGET_X86_64
191
static inline void gen_op_movq_A0_im(int64_t val)
192
{
193
    tcg_gen_movi_tl(cpu_A0, val);
194
}
195
#endif
196

    
197
static inline void gen_movtl_T0_im(target_ulong val)
198
{
199
    tcg_gen_movi_tl(cpu_T[0], val);
200
}
201

    
202
static inline void gen_movtl_T1_im(target_ulong val)
203
{
204
    tcg_gen_movi_tl(cpu_T[1], val);
205
}
206

    
207
static inline void gen_op_andl_T0_ffff(void)
208
{
209
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
210
}
211

    
212
static inline void gen_op_andl_T0_im(uint32_t val)
213
{
214
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
215
}
216

    
217
static inline void gen_op_movl_T0_T1(void)
218
{
219
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
220
}
221

    
222
static inline void gen_op_andl_A0_ffff(void)
223
{
224
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
225
}
226

    
227
#ifdef TARGET_X86_64
228

    
229
#define NB_OP_SIZES 4
230

    
231
#define DEF_REGS(prefix, suffix) \
232
  prefix ## EAX ## suffix,\
233
  prefix ## ECX ## suffix,\
234
  prefix ## EDX ## suffix,\
235
  prefix ## EBX ## suffix,\
236
  prefix ## ESP ## suffix,\
237
  prefix ## EBP ## suffix,\
238
  prefix ## ESI ## suffix,\
239
  prefix ## EDI ## suffix,\
240
  prefix ## R8 ## suffix,\
241
  prefix ## R9 ## suffix,\
242
  prefix ## R10 ## suffix,\
243
  prefix ## R11 ## suffix,\
244
  prefix ## R12 ## suffix,\
245
  prefix ## R13 ## suffix,\
246
  prefix ## R14 ## suffix,\
247
  prefix ## R15 ## suffix,
248

    
249
#else /* !TARGET_X86_64 */
250

    
251
#define NB_OP_SIZES 3
252

    
253
#define DEF_REGS(prefix, suffix) \
254
  prefix ## EAX ## suffix,\
255
  prefix ## ECX ## suffix,\
256
  prefix ## EDX ## suffix,\
257
  prefix ## EBX ## suffix,\
258
  prefix ## ESP ## suffix,\
259
  prefix ## EBP ## suffix,\
260
  prefix ## ESI ## suffix,\
261
  prefix ## EDI ## suffix,
262

    
263
#endif /* !TARGET_X86_64 */
264

    
265
#if defined(WORDS_BIGENDIAN)
266
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
267
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
268
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
269
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
270
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
271
#else
272
#define REG_B_OFFSET 0
273
#define REG_H_OFFSET 1
274
#define REG_W_OFFSET 0
275
#define REG_L_OFFSET 0
276
#define REG_LH_OFFSET 4
277
#endif
278

    
279
/* Store temporary cpu_T[t_index] into guest register 'reg' with operand
   size 'ot'.  Sub-word stores leave the remaining bytes of the register
   untouched, except OT_LONG on x86_64 which also zeroes the high half. */
static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            /* low byte (AL..DL; with REX in effect, the low byte of any reg) */
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            /* legacy high-byte regs AH..BH live at byte 1 of regs[reg - 4] */
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
311

    
312
static inline void gen_op_mov_reg_T0(int ot, int reg)
313
{
314
    gen_op_mov_reg_TN(ot, 0, reg);
315
}
316

    
317
static inline void gen_op_mov_reg_T1(int ot, int reg)
318
{
319
    gen_op_mov_reg_TN(ot, 1, reg);
320
}
321

    
322
static inline void gen_op_mov_reg_A0(int size, int reg)
323
{
324
    switch(size) {
325
    case 0:
326
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
327
        break;
328
#ifdef TARGET_X86_64
329
    case 1:
330
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
331
        /* high part of register set to zero */
332
        tcg_gen_movi_tl(cpu_tmp0, 0);
333
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
334
        break;
335
    default:
336
    case 2:
337
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
338
        break;
339
#else
340
    default:
341
    case 1:
342
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
343
        break;
344
#endif
345
    }
346
}
347

    
348
/* Load guest register 'reg' into temporary cpu_T[t_index].  Only OT_BYTE
   with a legacy high-byte register (AH..BH) needs special handling; all
   other cases load the full target_ulong and the caller uses whichever
   low bytes it needs. */
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            /* AH..BH: zero-extended load of byte 1 of regs[reg - 4] */
            tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}
364

    
365
static inline void gen_op_movl_A0_reg(int reg)
366
{
367
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
368
}
369

    
370
static inline void gen_op_addl_A0_im(int32_t val)
371
{
372
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
373
#ifdef TARGET_X86_64
374
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
375
#endif
376
}
377

    
378
#ifdef TARGET_X86_64
379
static inline void gen_op_addq_A0_im(int64_t val)
380
{
381
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
382
}
383
#endif
384
    
385
/* A0 += val, using full 64-bit arithmetic in 64-bit code segments and
   the 32-bit-masked variant otherwise. */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
394

    
395
static inline void gen_op_addl_T0_T1(void)
396
{
397
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
398
}
399

    
400
static inline void gen_op_jmp_T0(void)
401
{
402
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
403
}
404

    
405
static inline void gen_op_addw_ESP_im(int32_t val)
406
{
407
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
408
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
409
    tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]) + REG_W_OFFSET);
410
}
411

    
412
/* ESP += val with 32-bit semantics: on x86_64 the result is masked to
   32 bits before the write-back, so the high half ends up zero. */
static inline void gen_op_addl_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}
421

    
422
#ifdef TARGET_X86_64
423
static inline void gen_op_addq_ESP_im(int32_t val)
424
{
425
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
426
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
427
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
428
}
429
#endif
430

    
431
static inline void gen_op_set_cc_op(int32_t val)
432
{
433
    tcg_gen_movi_i32(cpu_cc_op, val);
434
}
435

    
436
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
437
{
438
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
439
    if (shift != 0) 
440
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
441
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
442
#ifdef TARGET_X86_64
443
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
444
#endif
445
}
446

    
447
static inline void gen_op_movl_A0_seg(int reg)
448
{
449
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
450
}
451

    
452
static inline void gen_op_addl_A0_seg(int reg)
453
{
454
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
455
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
456
#ifdef TARGET_X86_64
457
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
458
#endif
459
}
460

    
461
#ifdef TARGET_X86_64
462
static inline void gen_op_movq_A0_seg(int reg)
463
{
464
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
465
}
466

    
467
static inline void gen_op_addq_A0_seg(int reg)
468
{
469
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
470
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
471
}
472

    
473
static inline void gen_op_movq_A0_reg(int reg)
474
{
475
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
476
}
477

    
478
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
479
{
480
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
481
    if (shift != 0) 
482
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
483
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
484
}
485
#endif
486

    
487
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
488
    [0] = {
489
        DEF_REGS(gen_op_cmovw_, _T1_T0)
490
    },
491
    [1] = {
492
        DEF_REGS(gen_op_cmovl_, _T1_T0)
493
    },
494
#ifdef TARGET_X86_64
495
    [2] = {
496
        DEF_REGS(gen_op_cmovq_, _T1_T0)
497
    },
498
#endif
499
};
500

    
501
static inline void gen_op_lds_T0_A0(int idx)
502
{
503
    int mem_index = (idx >> 2) - 1;
504
    switch(idx & 3) {
505
    case 0:
506
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
507
        break;
508
    case 1:
509
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
510
        break;
511
    default:
512
    case 2:
513
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
514
        break;
515
    }
516
}
517

    
518
/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
/* Load T0 from guest address A0.  'idx' packs both the operand size
   (low 2 bits: 0=8, 1=16, 2=32, 3=64 bit) and the softmmu memory index
   ((idx >> 2) - 1); callers pass ot + s->mem_index. */
static inline void gen_op_ld_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
538

    
539
/* Unsigned load of T0 from A0.  Identical to gen_op_ld_T0_A0, whose
   sub-64-bit loads are already zero-extending (ld8u/ld16u/ld32u). */
static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_T0_A0(idx);
}
543

    
544
static inline void gen_op_ld_T1_A0(int idx)
545
{
546
    int mem_index = (idx >> 2) - 1;
547
    switch(idx & 3) {
548
    case 0:
549
        tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
550
        break;
551
    case 1:
552
        tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
553
        break;
554
    case 2:
555
        tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
556
        break;
557
    default:
558
    case 3:
559
        tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
560
        break;
561
    }
562
}
563

    
564
/* Store T0 to guest address A0.  'idx' encoding as in gen_op_ld_T0_A0:
   low 2 bits = access size, upper bits = memory index + 1. */
static inline void gen_op_st_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
583

    
584
static inline void gen_op_st_T1_A0(int idx)
585
{
586
    int mem_index = (idx >> 2) - 1;
587
    switch(idx & 3) {
588
    case 0:
589
        tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
590
        break;
591
    case 1:
592
        tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
593
        break;
594
    case 2:
595
        tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
596
        break;
597
    default:
598
    case 3:
599
        tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
600
        break;
601
    }
602
}
603

    
604
/* Write 'pc' into env->eip.  Used to sync the architectural EIP before
   emitting helper calls that may fault (see gen_check_io). */
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
609

    
610
/* Compute A0 = linear address of a string instruction's source operand
   (seg:ESI), honouring the segment override prefix, the address size
   from s->aflag (2 = 64, 1 = 32, 0 = 16 bit) and the addseg shortcut
   (segment base added only when it may be non zero). */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64 bit address: base added only for an explicit override */
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}
644

    
645
/* Compute A0 = linear address of a string instruction's destination
   operand (ES:EDI).  Unlike the source side, the destination segment is
   always ES and cannot be overridden. */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64 bit address: no segment base */
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        /* 16 bit address */
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
665

    
666
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
667
    gen_op_movl_T0_Dshiftb,
668
    gen_op_movl_T0_Dshiftw,
669
    gen_op_movl_T0_Dshiftl,
670
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
671
};
672

    
673
static GenOpFunc1 *gen_op_jnz_ecx[3] = {
674
    gen_op_jnz_ecxw,
675
    gen_op_jnz_ecxl,
676
    X86_64_ONLY(gen_op_jnz_ecxq),
677
};
678

    
679
static GenOpFunc1 *gen_op_jz_ecx[3] = {
680
    gen_op_jz_ecxw,
681
    gen_op_jz_ecxl,
682
    X86_64_ONLY(gen_op_jz_ecxq),
683
};
684

    
685
static GenOpFunc *gen_op_dec_ECX[3] = {
686
    gen_op_decw_ECX,
687
    gen_op_decl_ECX,
688
    X86_64_ONLY(gen_op_decq_ECX),
689
};
690

    
691
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
692
    {
693
        gen_op_jnz_subb,
694
        gen_op_jnz_subw,
695
        gen_op_jnz_subl,
696
        X86_64_ONLY(gen_op_jnz_subq),
697
    },
698
    {
699
        gen_op_jz_subb,
700
        gen_op_jz_subw,
701
        gen_op_jz_subl,
702
        X86_64_ONLY(gen_op_jz_subq),
703
    },
704
};
705

    
706
static void *helper_in_func[3] = {
707
    helper_inb,
708
    helper_inw,
709
    helper_inl,
710
};
711

    
712
static void *helper_out_func[3] = {
713
    helper_outb,
714
    helper_outw,
715
    helper_outl,
716
};
717

    
718
static void *gen_check_io_func[3] = {
719
    helper_check_iob,
720
    helper_check_iow,
721
    helper_check_iol,
722
};
723

    
724
/* Emit the permission checks required before an I/O instruction of size
   'ot' (0=8, 1=16, 2=32 bit) on the port in cpu_T[0]:
   - in protected mode with CPL > IOPL, or in vm86 mode, call the
     helper_check_io* helper for the access size (raises an exception
     when the access is denied — presumably via the TSS I/O permission
     bitmap; helpers are defined elsewhere);
   - when SVM IOIO interception is enabled in s->flags, call
     helper_svm_check_io with the exit information.
   cc_op and EIP are synced before either helper so a fault is raised
   with consistent guest state. */
static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
                           cpu_tmp2_i32);
    }
    if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
        if (!state_saved) {
            /* sync state only once, whichever check runs first */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(cur_eip);
            state_saved = 1;
        }
        /* bit (4 + ot): access size flag — NOTE(review): matches the SVM
           IOIO exit-info layout; confirm against the AMD manual */
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           cpu_tmp2_i32,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));
    }
}
756

    
757
/* Emit one MOVS iteration: copy an 'ot'-sized element from seg:ESI to
   ES:EDI, then advance ESI and EDI by the direction-dependent element
   size (Dshift value loaded into T0). */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
778

    
779
/* Flush the statically tracked condition-code state: materialise
   s->cc_op into the cc_op register and mark it dynamic so later code
   cannot assume a compile-time value. */
static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}
786

    
787
static void gen_op_update1_cc(void)
788
{
789
    tcg_gen_discard_tl(cpu_cc_src);
790
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
791
}
792

    
793
static void gen_op_update2_cc(void)
794
{
795
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
796
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
797
}
798

    
799
static inline void gen_op_cmpl_T0_T1_cc(void)
800
{
801
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
802
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
803
}
804

    
805
static inline void gen_op_testl_T0_T1_cc(void)
806
{
807
    tcg_gen_discard_tl(cpu_cc_src);
808
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
809
}
810

    
811
static void gen_op_update_neg_cc(void)
812
{
813
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
814
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
815
}
816

    
817
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
/* For REP-prefixed string ops: emit "if ECX == 0, jump to next_eip".
   Returns the label l2 placed just before that exit jump, so the caller
   can branch back to it to terminate the loop. */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1);   /* ECX != 0: skip over the exit jump */
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);     /* ECX == 0: jump past the instruction */
    gen_set_label(l1);
    return l2;
}
831

    
832
static inline void gen_stos(DisasContext *s, int ot)
833
{
834
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
835
    gen_string_movl_A0_EDI(s);
836
    gen_op_st_T0_A0(ot + s->mem_index);
837
    gen_op_movl_T0_Dshift[ot]();
838
#ifdef TARGET_X86_64
839
    if (s->aflag == 2) {
840
        gen_op_addq_EDI_T0();
841
    } else
842
#endif
843
    if (s->aflag) {
844
        gen_op_addl_EDI_T0();
845
    } else {
846
        gen_op_addw_EDI_T0();
847
    }
848
}
849

    
850
static inline void gen_lods(DisasContext *s, int ot)
851
{
852
    gen_string_movl_A0_ESI(s);
853
    gen_op_ld_T0_A0(ot + s->mem_index);
854
    gen_op_mov_reg_T0(ot, R_EAX);
855
    gen_op_movl_T0_Dshift[ot]();
856
#ifdef TARGET_X86_64
857
    if (s->aflag == 2) {
858
        gen_op_addq_ESI_T0();
859
    } else
860
#endif
861
    if (s->aflag) {
862
        gen_op_addl_ESI_T0();
863
    } else {
864
        gen_op_addw_ESI_T0();
865
    }
866
}
867

    
868
static inline void gen_scas(DisasContext *s, int ot)
869
{
870
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
871
    gen_string_movl_A0_EDI(s);
872
    gen_op_ld_T1_A0(ot + s->mem_index);
873
    gen_op_cmpl_T0_T1_cc();
874
    gen_op_movl_T0_Dshift[ot]();
875
#ifdef TARGET_X86_64
876
    if (s->aflag == 2) {
877
        gen_op_addq_EDI_T0();
878
    } else
879
#endif
880
    if (s->aflag) {
881
        gen_op_addl_EDI_T0();
882
    } else {
883
        gen_op_addw_EDI_T0();
884
    }
885
}
886

    
887
static inline void gen_cmps(DisasContext *s, int ot)
888
{
889
    gen_string_movl_A0_ESI(s);
890
    gen_op_ld_T0_A0(ot + s->mem_index);
891
    gen_string_movl_A0_EDI(s);
892
    gen_op_ld_T1_A0(ot + s->mem_index);
893
    gen_op_cmpl_T0_T1_cc();
894
    gen_op_movl_T0_Dshift[ot]();
895
#ifdef TARGET_X86_64
896
    if (s->aflag == 2) {
897
        gen_op_addq_ESI_T0();
898
        gen_op_addq_EDI_T0();
899
    } else
900
#endif
901
    if (s->aflag) {
902
        gen_op_addl_ESI_T0();
903
        gen_op_addl_EDI_T0();
904
    } else {
905
        gen_op_addw_ESI_T0();
906
        gen_op_addw_EDI_T0();
907
    }
908
}
909

    
910
/* Emit one INS iteration: read an 'ot'-sized value from port DX into
   ES:EDI, then advance EDI.  A zero is stored to the destination before
   the port read — NOTE(review): presumably so a page fault on the
   destination is taken before the device access happens; confirm. */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);   /* dummy write, see note above */
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);  /* port is 16 bit */
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
932

    
933
/* Emit one OUTS iteration: load an 'ot'-sized value from seg:ESI and
   write it to port DX, then advance ESI. */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);  /* port is 16 bit */
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
956

    
957
/* same method as Valgrind : we generate jumps to current or next
958
   instruction */
959
#define GEN_REPZ(op)                                                          \
960
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
961
                                 target_ulong cur_eip, target_ulong next_eip) \
962
{                                                                             \
963
    int l2;\
964
    gen_update_cc_op(s);                                                      \
965
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
966
    gen_ ## op(s, ot);                                                        \
967
    gen_op_dec_ECX[s->aflag]();                                               \
968
    /* a loop would cause two single step exceptions if ECX = 1               \
969
       before rep string_insn */                                              \
970
    if (!s->jmp_opt)                                                          \
971
        gen_op_jz_ecx[s->aflag](l2);                                          \
972
    gen_jmp(s, cur_eip);                                                      \
973
}
974

    
975
#define GEN_REPZ2(op)                                                         \
976
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
977
                                   target_ulong cur_eip,                      \
978
                                   target_ulong next_eip,                     \
979
                                   int nz)                                    \
980
{                                                                             \
981
    int l2;\
982
    gen_update_cc_op(s);                                                      \
983
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
984
    gen_ ## op(s, ot);                                                        \
985
    gen_op_dec_ECX[s->aflag]();                                               \
986
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
987
    gen_op_string_jnz_sub[nz][ot](l2);\
988
    if (!s->jmp_opt)                                                          \
989
        gen_op_jz_ecx[s->aflag](l2);                                          \
990
    gen_jmp(s, cur_eip);                                                      \
991
}
992

    
993
GEN_REPZ(movs)
994
GEN_REPZ(stos)
995
GEN_REPZ(lods)
996
GEN_REPZ(ins)
997
GEN_REPZ(outs)
998
GEN_REPZ2(scas)
999
GEN_REPZ2(cmps)
1000

    
1001
enum {
1002
    JCC_O,
1003
    JCC_B,
1004
    JCC_Z,
1005
    JCC_BE,
1006
    JCC_S,
1007
    JCC_P,
1008
    JCC_L,
1009
    JCC_LE,
1010
};
1011

    
1012
static GenOpFunc1 *gen_jcc_sub[4][8] = {
1013
    [OT_BYTE] = {
1014
        NULL,
1015
        gen_op_jb_subb,
1016
        gen_op_jz_subb,
1017
        gen_op_jbe_subb,
1018
        gen_op_js_subb,
1019
        NULL,
1020
        gen_op_jl_subb,
1021
        gen_op_jle_subb,
1022
    },
1023
    [OT_WORD] = {
1024
        NULL,
1025
        gen_op_jb_subw,
1026
        gen_op_jz_subw,
1027
        gen_op_jbe_subw,
1028
        gen_op_js_subw,
1029
        NULL,
1030
        gen_op_jl_subw,
1031
        gen_op_jle_subw,
1032
    },
1033
    [OT_LONG] = {
1034
        NULL,
1035
        gen_op_jb_subl,
1036
        gen_op_jz_subl,
1037
        gen_op_jbe_subl,
1038
        gen_op_js_subl,
1039
        NULL,
1040
        gen_op_jl_subl,
1041
        gen_op_jle_subl,
1042
    },
1043
#ifdef TARGET_X86_64
1044
    [OT_QUAD] = {
1045
        NULL,
1046
        BUGGY_64(gen_op_jb_subq),
1047
        gen_op_jz_subq,
1048
        BUGGY_64(gen_op_jbe_subq),
1049
        gen_op_js_subq,
1050
        NULL,
1051
        BUGGY_64(gen_op_jl_subq),
1052
        BUGGY_64(gen_op_jle_subq),
1053
    },
1054
#endif
1055
};
1056
/* LOOP-family generators indexed by [address size (0=16,1=32,2=64)][type]:
   column 0 = LOOPNZ, 1 = LOOPZ, 2 = plain ECX != 0 test.  The fourth
   column of each row is unused here. */
static GenOpFunc1 *gen_op_loop[3][4] = {
    [0] = {
        gen_op_loopnzw,
        gen_op_loopzw,
        gen_op_jnz_ecxw,
    },
    [1] = {
        gen_op_loopnzl,
        gen_op_loopzl,
        gen_op_jnz_ecxl,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_loopnzq,
        gen_op_loopzq,
        gen_op_jnz_ecxq,
    },
#endif
};
1075

    
1076
/* SETcc generators that evaluate the condition from the fully-computed
   eflags (slow path), indexed by jcc_op; result is left in T0. */
static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};
1086

    
1087
/* Fast SETcc generators for a pending-subtraction flags state, indexed by
   [operand size][jcc_op]; NULL slots (JCC_O, JCC_P) fall back to the slow
   gen_setcc_slow table. */
static GenOpFunc *gen_setcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        NULL,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        NULL,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        NULL,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        NULL,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,
    },
#endif
};
1131

    
1132
/* FPU arithmetic helpers (ST0 <- ST0 op FT0) indexed by the 3-bit op field
   of the D8/DA-style opcodes.  Slots 2 and 3 both map to fcom; presumably
   the pop for the FCOMP form is emitted separately by the caller. */
static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,
};
1142

    
1143
/* NOTE the exception in "r" op ordering */
/* FPU arithmetic helpers (STN <- STN op ST0): sub/subr and div/divr are
   deliberately swapped relative to the ST0 table above, and the compare
   slots (2, 3) are NULL because FCOM has no STN destination form. */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    NULL,
    NULL,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
};
1154

    
1155
/* compute eflags.C to reg */
/* Emits an indirect call through cc_table[cpu_cc_op].compute_c: the lazy
   flags op indexes cc_table, the function pointer is loaded at translation
   time into a temp and called as a pure helper; the 32-bit carry result is
   then zero-extended into 'reg'.  The shift amount (3 on 32-bit hosts, 4 on
   64-bit) multiplies cc_op by sizeof(CCTable) -- presumably two function
   pointers; verify if the CCTable layout ever changes. */
static void gen_compute_eflags_c(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
1176

    
1177
/* compute all eflags into 'reg' (callers typically pass cpu_cc_src) */
/* Same dispatch mechanism as gen_compute_eflags_c, but through the
   cc_table[cpu_cc_op].compute_all slot, which yields the complete eflags
   value rather than just the carry. */
static void gen_compute_eflags(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
1198

    
1199
/* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit TCG code for an ALU operation T0 = T0 <op> T1 and write the result
   back to register 'd' or to memory.  Updates the lazy condition-code
   state: cc_src/cc_dst are set for later flag reconstruction and s1->cc_op
   records (or defers, for ADC/SBB) which CC_OP_* formula applies. */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    /* load first operand from register or from [A0] */
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        /* need the incoming carry: flush pending flags state first */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        /* runtime cc_op = CC_OP_ADDB + ot + 4*carry -- presumably selects
           the ADC row of the CC_OP enum when carry was set; confirm against
           the CC_OP_* ordering. */
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_SBBL:
        /* subtract with borrow: same carry-dependent scheme as ADC */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_ADDB + ot;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_CMPL:
        /* compare only: flags are set, nothing is written back */
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    }
}
1294

    
1295
/* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit INC (c > 0) or DEC (c <= 0) of a register/memory operand.  The
   previous flags state is flushed and the carry recomputed into cc_src
   before switching cc_op to INC/DEC (x86 INC/DEC leave CF unchanged). */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
    /* preserve CF across the operation by materialising it in cc_src */
    gen_compute_eflags_c(cpu_cc_src);
}
1318

    
1319
/* Zero-extend 'reg' in place from the given operand size to the full
   target word; OT_QUAD (and anything unrecognised) is left untouched. */
static void gen_extu(int ot, TCGv reg)
{
    if (ot == OT_BYTE) {
        tcg_gen_ext8u_tl(reg, reg);
    } else if (ot == OT_WORD) {
        tcg_gen_ext16u_tl(reg, reg);
    } else if (ot == OT_LONG) {
        tcg_gen_ext32u_tl(reg, reg);
    }
    /* else: already full width, nothing to do */
}
1335

    
1336
/* Sign-extend 'reg' in place from the given operand size to the full
   target word; OT_QUAD (and anything unrecognised) is left untouched. */
static void gen_exts(int ot, TCGv reg)
{
    if (ot == OT_BYTE) {
        tcg_gen_ext8s_tl(reg, reg);
    } else if (ot == OT_WORD) {
        tcg_gen_ext16s_tl(reg, reg);
    } else if (ot == OT_LONG) {
        tcg_gen_ext32s_tl(reg, reg);
    }
    /* else: already full width, nothing to do */
}
1352

    
1353
/* XXX: add faster immediate case */
/* SHL/SHR/SAR with variable count in T1.  The count is masked to 5 bits
   (6 for 64-bit ops) per the x86 rules.  T3 receives the value shifted by
   count-1 so the flags path can recover CF; the flag update is branched
   around when the masked count is zero (flags unchanged in that case). */
static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
                            int is_right, int is_arith)
{
    target_ulong mask;
    int shift_label;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    /* tmp5 = count - 1, used to expose the last bit shifted out */
    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);

    if (is_right) {
        if (is_arith) {
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    shift_label = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), shift_label);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);

    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1413

    
1414
/* Shift arg1 left by arg2 bits into ret; a negative count means a
   logical right shift by -arg2. */
static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
{
    if (arg2 < 0) {
        tcg_gen_shri_tl(ret, arg1, -arg2);
    } else {
        tcg_gen_shli_tl(ret, arg1, arg2);
    }
}
1421

    
1422
/* XXX: add faster immediate case */
/* ROL/ROR with variable count in T1.  Both the rotation itself and the
   flags update are branched around when the masked count is zero (TCG
   shifts by >= word size are undefined, and flags are unchanged).  T3
   keeps the pre-rotation value so OF can be derived from old^new. */
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
                          int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label1);

    /* reduce the count modulo the data width for sub-word rotates */
    if (ot <= OT_WORD)
        tcg_gen_andi_tl(cpu_tmp0, cpu_T[1], (1 << (3 + ot)) - 1);
    else
        tcg_gen_mov_tl(cpu_tmp0, cpu_T[1]);

    gen_extu(ot, cpu_T[0]);
    tcg_gen_mov_tl(cpu_T3, cpu_T[0]);

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
    if (is_right) {
        tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    } else {
        tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    }
    tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label2);

    /* rotate only touches CF and OF: recompute full flags, then patch
       OF from bit changes and CF from the rotated-in bit */
    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    tcg_gen_xor_tl(cpu_tmp0, cpu_T3, cpu_T[0]);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    if (is_right) {
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], data_bits - 1);
    }
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);

    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1501

    
1502
/* Rotate-through-carry helpers, indexed by ot + is_right * 4:
   slots 0-3 are RCL (byte/word/long/quad), 4-7 are RCR.  The 64-bit
   variants only exist on x86-64 builds. */
static void *helper_rotc[8] = {
    helper_rclb,
    helper_rclw,
    helper_rcll,
    X86_64_ONLY(helper_rclq),
    helper_rcrb,
    helper_rcrw,
    helper_rcrl,
    X86_64_ONLY(helper_rcrq),
};
1512

    
1513
/* XXX: add faster immediate = 1 case */
/* RCL/RCR with variable count in T1, implemented via a runtime helper.
   NOTE(review): the helper appears to leave the new flags in cpu_T3, or
   -1 when the count was zero (no flag update) -- the branch below skips
   the flags commit in that case; confirm against helper_rcl*/rcr*. */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
                           int is_right)
{
    int label1;

    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
                       cpu_T[0], cpu_T[0], cpu_T[1]);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(-1), label1);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label1);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1547

    
1548
/* XXX: add faster immediate case */
/* SHLD/SHRD: double-precision shift of T0 with bits supplied from T1,
   count in T3 (masked to 5 or 6 bits).  Both the shift and the flags
   update are branched around when the count is zero.  After the shift,
   T1 carries the value shifted by count-1 so CF can be recovered via
   the SAR/SHL lazy-flags formulas. */
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
                                int is_right)
{
    int label1, label2, data_bits;
    target_ulong mask;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);

    tcg_gen_addi_tl(cpu_tmp5, cpu_T3, -1);
    if (ot == OT_WORD) {
        /* Note: we implement the Intel behaviour for shift count > 16 */
        if (is_right) {
            /* build a 32-bit value T1:T0 and shift it as one unit */
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
            tcg_gen_shli_tl(cpu_tmp0, cpu_T[1], 16);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);

            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);

            /* only needed if count > 16, but a test would complicate */
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
            tcg_gen_shl_tl(cpu_tmp0, cpu_T[0], cpu_tmp5);

            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);

            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        } else {
            /* XXX: not optimal */
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
            tcg_gen_shli_tl(cpu_T[1], cpu_T[1], 16);
            tcg_gen_or_tl(cpu_T[1], cpu_T[1], cpu_T[0]);
            tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);

            tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
            tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
            tcg_gen_shr_tl(cpu_tmp6, cpu_T[1], cpu_tmp0);
            tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);

            tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
            tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        data_bits = 8 << ot;
        if (is_right) {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);

            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);

            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
            tcg_gen_shl_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);

        } else {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);

            tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);

            tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
            tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    }
    tcg_gen_mov_tl(cpu_T[1], cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label2);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
    if (is_right) {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    } else {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
    }
    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1658

    
1659
/* Dispatch a shift/rotate operation.  The count is taken from register
   's' (loaded into T1) unless s == OR_TMP1, in which case T1 already
   holds it; 'd' names the destination register or OR_TMP0 for memory. */
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);
    switch(op) {
    case OP_ROL:
    case OP_ROR:
        gen_rot_rm_T1(s1, ot, d, op == OP_ROR);
        break;
    case OP_RCL:
    case OP_RCR:
        gen_rotc_rm_T1(s1, ot, d, op == OP_RCR);
        break;
    case OP_SHL:
    case OP_SHL1:
        gen_shift_rm_T1(s1, ot, d, 0, 0);
        break;
    case OP_SHR:
        gen_shift_rm_T1(s1, ot, d, 1, 0);
        break;
    case OP_SAR:
        gen_shift_rm_T1(s1, ot, d, 1, 1);
        break;
    }
}
1688

    
1689
/* Shift/rotate with an immediate count: load the count into T1 and
   reuse the variable-count path. */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
1695

    
1696
/* Decode the memory operand of a modrm byte and emit code leaving its
   linear address in A0, consuming any SIB/displacement bytes from the
   instruction stream (advances s->pc).  Handles 16-bit and 32/64-bit
   addressing, RIP-relative addressing in 64-bit mode, segment-override
   prefixes and the implicit SS default for EBP/ESP-based addresses.
   *reg_ptr/*offset_ptr always return OR_A0/0 (legacy interface). */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            /* base == 4 selects a SIB byte */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base: disp32, RIP-relative in 64-bit mode w/o SIB */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0) {
                    gen_op_addq_A0_im(disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* EBP/ESP-based addresses default to SS */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        /* 16-bit addressing: fixed base/index register pairs */
        switch (mod) {
        case 0:
            if (rm == 6) {
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* BP-based modes (rm 2, 3, 6) default to SS */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
1879

    
1880
/* Decode and discard the memory operand of a hint/NOP opcode: advances
   s->pc past any SIB and displacement bytes without emitting code.
   Register operands (mod == 3) consume nothing beyond the modrm byte. */
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
    if (mod == 3)
        return;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing */
        base = rm;

        if (base == 4) {
            /* SIB byte present */
            code = ldub_code(s->pc++);
            base = (code & 7);
        }

        switch (mod) {
        case 0:
            if (base == 5) {
                /* disp32 with no base */
                s->pc += 4;
            }
            break;
        case 1:
            s->pc++;
            break;
        default:
        case 2:
            s->pc += 4;
            break;
        }
    } else {
        /* 16-bit addressing */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* disp16 with no base */
                s->pc += 2;
            }
            break;
        case 1:
            s->pc++;
            break;
        default:
        case 2:
            s->pc += 2;
            break;
        }
    }
}
1929

    
1930
/* used for LEA and MOV AX, mem */
1931
static void gen_add_A0_ds_seg(DisasContext *s)
1932
{
1933
    int override, must_add_seg;
1934
    must_add_seg = s->addseg;
1935
    override = R_DS;
1936
    if (s->override >= 0) {
1937
        override = s->override;
1938
        must_add_seg = 1;
1939
    } else {
1940
        override = R_DS;
1941
    }
1942
    if (must_add_seg) {
1943
#ifdef TARGET_X86_64
1944
        if (CODE64(s)) {
1945
            gen_op_addq_A0_seg(override);
1946
        } else
1947
#endif
1948
        {
1949
            gen_op_addl_A0_seg(override);
1950
        }
1951
    }
1952
}
1953

    
1954
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
   OR_TMP0 */
/* For register operands (mod == 3) the transfer is register-to-register;
   otherwise the effective address is computed via gen_lea_modrm and the
   value is loaded from / stored to memory through A0. */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    } else {
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_st_T0_A0(ot + s->mem_index);
        } else {
            gen_op_ld_T0_A0(ot + s->mem_index);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    }
}
1985

    
1986
/* Fetch an immediate of the given operand size from the instruction
   stream, advancing s->pc past it.  Sizes larger than OT_LONG are read
   as 32-bit immediates. */
static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t ret;

    if (ot == OT_BYTE) {
        ret = ldub_code(s->pc);
        s->pc += 1;
    } else if (ot == OT_WORD) {
        ret = lduw_code(s->pc);
        s->pc += 2;
    } else {
        /* OT_LONG and anything larger */
        ret = ldl_code(s->pc);
        s->pc += 4;
    }
    return ret;
}
2007

    
2008
static inline int insn_const_size(unsigned int ot)
2009
{
2010
    if (ot <= OT_LONG)
2011
        return 1 << ot;
2012
    else
2013
        return 4;
2014
}
2015

    
2016
/* Emit a jump to 'eip'.  When the target lies on the same guest page as
   the current TB (or the page of the instruction just translated), a
   chained direct jump (goto_tb/exit_tb with the TB pointer tagged by
   tb_num) is used; otherwise the jump falls back to ending the block. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2036

    
2037
/* Generate a conditional jump for condition code 'b': taken target is
   'val', fall-through is 'next_eip'.  When TB chaining is allowed
   (s->jmp_opt) a specialized condition helper is used where available
   and both targets are emitted as chained TBs; otherwise the slow
   setcc path computes the condition into T0 and the block ends with a
   generic end-of-block.
   Fixes vs. original: removed the dead local 'tb' (assigned from s->tb
   but never read) and merged the two identical JCC_Z/JCC_S cases. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;            /* low bit of 'b' inverts the condition */
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            switch(jcc_op) {
            case JCC_Z:
            case JCC_S:
                /* Z and S depend only on the result; select the helper
                   by the operand-size component of cc_op. */
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            /* no specialized helper: evaluate the condition into T0 */
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
2165

    
2166
/* Generate code computing condition 'b' into T0, using a specialized
   helper when the current cc_op allows it and the slow generic path
   otherwise.  The low bit of 'b' inverts the result.
   Fix vs. original: merged the two identical JCC_Z/JCC_S cases. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        switch(jcc_op) {
        case JCC_Z:
        case JCC_S:
            /* Z and S depend only on the result; select the helper by
               the operand-size component of cc_op. */
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}
2232

    
2233
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* protected mode: the load may fault, so go through the helper
           with a synced EIP and condition codes */
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        /* real/vm86 mode: plain selector load, cannot fault */
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
2256

    
2257
static inline int svm_is_rep(int prefixes)
2258
{
2259
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2260
}
2261

    
2262
/* Emit SVM intercept checks for instruction 'type' with exit-info
   'param'.  Returns 1 when a guaranteed #VMEXIT was generated and the
   TB was terminated, 0 when execution may continue (including the
   cases where the interception is only decided at run time by the
   helper). */
static inline int
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint32_t type, uint64_t param)
{
    if(!(s->flags & (INTERCEPT_SVM_MASK)))
        /* no SVM activated */
        return 0;
    switch(type) {
        /* CRx and DRx reads/writes */
        case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
            }
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_svm_check_intercept_param,
                               tcg_const_i32(type), tcg_const_i64(param));
            /* this is a special case as we do not know if the interception occurs
               so we assume there was none */
            return 0;
        case SVM_EXIT_MSR:
            if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                }
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_svm_check_intercept_param,
                                   tcg_const_i32(type), tcg_const_i64(param));
                /* this is a special case as we do not know if the interception occurs
                   so we assume there was none */
                return 0;
            }
            break;
        default:
            /* all other intercepts are decided statically from the
               intercept bitmap in s->flags */
            if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                }
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_vmexit,
                                   tcg_const_i32(type), tcg_const_i64(param));
                /* we can optimize this one so TBs don't get longer
                   than up to vmexit */
                gen_eob(s);
                return 1;
            }
    }
    return 0;
}
2310

    
2311
/* Convenience wrapper: intercept check with a zero exit-info param. */
static inline int
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    return gen_svm_check_intercept_param(s, pc_start, type, 0);
}
2316

    
2317
/* Add 'addend' to the stack pointer using the width implied by the
   current code/stack size (64-bit, 32-bit, or 16-bit). */
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_addq_ESP_im(addend);
    } else
#endif
    if (s->ss32) {
        gen_op_addl_ESP_im(addend);
    } else {
        gen_op_addw_ESP_im(addend);
    }
}
2330

    
2331
/* generate a push. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            /* 64-bit operand push */
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            /* 16-bit operand push in long mode */
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                /* keep the unsegmented ESP value in T1 for the final
                   stack pointer update */
                gen_op_movl_T1_A0();
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
2370

    
2371
/* generate a push. It depends on ss32, addseg and dflag */
2372
/* slower version for T1, only used for call Ev */
2373
static void gen_push_T1(DisasContext *s)
2374
{
2375
#ifdef TARGET_X86_64
2376
    if (CODE64(s)) {
2377
        gen_op_movq_A0_reg(R_ESP);
2378
        if (s->dflag) {
2379
            gen_op_addq_A0_im(-8);
2380
            gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2381
        } else {
2382
            gen_op_addq_A0_im(-2);
2383
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
2384
        }
2385
        gen_op_mov_reg_A0(2, R_ESP);
2386
    } else
2387
#endif
2388
    {
2389
        gen_op_movl_A0_reg(R_ESP);
2390
        if (!s->dflag)
2391
            gen_op_addl_A0_im(-2);
2392
        else
2393
            gen_op_addl_A0_im(-4);
2394
        if (s->ss32) {
2395
            if (s->addseg) {
2396
                gen_op_addl_A0_seg(R_SS);
2397
            }
2398
        } else {
2399
            gen_op_andl_A0_ffff();
2400
            gen_op_addl_A0_seg(R_SS);
2401
        }
2402
        gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2403

    
2404
        if (s->ss32 && !s->addseg)
2405
            gen_op_mov_reg_A0(1, R_ESP);
2406
        else
2407
            gen_stack_update(s, (-2) << s->dflag);
2408
    }
2409
}
2410

    
2411
/* two step pop is necessary for precise exceptions */
/* Load the top-of-stack value into T0; the stack pointer is only
   adjusted afterwards by gen_pop_update(), so a faulting load leaves
   ESP/RSP unchanged. */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
    }
}
2432

    
2433
/* Second half of a pop: advance the stack pointer past the value that
   gen_pop_T0() loaded. */
static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
        return;
    }
#endif
    gen_stack_update(s, 2 << s->dflag);
}
2444

    
2445
/* Compute the current stack address into A0 (with SS base applied if
   addseg); the unsegmented offset is left in T1. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
2454

    
2455
/* NOTE: wrap around in 16 bit not fully handled */
/* PUSHA: push the eight general registers (EAX..EDI order via 7-i). */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    /* reserve space for 8 registers of 2 or 4 bytes each */
    gen_op_addl_A0_im(-16 <<  s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    /* final stack pointer is the lowest written address (saved in T1) */
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
2473

    
2474
/* NOTE: wrap around in 16 bit not fully handled */
/* POPA: pop the eight general registers; the stored ESP is ignored. */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    /* T1 = final stack pointer, past the 8 popped slots */
    gen_op_addl_T1_im(16 <<  s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
2495

    
2496
/* Generate code for the ENTER instruction: push EBP/RBP, set up the
   new frame pointer, optionally copy 'level' display entries via a
   helper, and reserve 'esp_addend' bytes of locals. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        /* bug fix: read the full 64-bit RSP; the original used the
           32-bit gen_op_movl_A0_reg, truncating stack pointers above
           4GB (compare gen_push_T0's CODE64 path) */
        gen_op_movq_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        /* NOTE(review): gen_op_movl_T1_A0 keeps only the low 32 bits
           of the new frame pointer — matches the original behavior,
           verify for >4GB stacks */
        gen_op_movl_T1_A0();

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter64_level,
                               tcg_const_i32(level),
                               tcg_const_i32((ot == OT_QUAD)),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        gen_op_movl_T1_A0();
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter_level,
                               tcg_const_i32(level),
                               tcg_const_i32(s->dflag),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
2551

    
2552
/* Raise CPU exception 'trapno' at guest EIP 'cur_eip': sync the
   condition codes and EIP, call the helper, and end the TB. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
    s->is_jmp = 3;
}
2560

    
2561
/* an interrupt is different from an exception because of the
   privilege checks */
/* Raise software interrupt 'intno'; next_eip - cur_eip is the
   instruction length passed to the helper. */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_2(helper_raise_interrupt,
                       tcg_const_i32(intno),
                       tcg_const_i32(next_eip - cur_eip));
    s->is_jmp = 3;
}
2574

    
2575
/* Enter the debugger at guest EIP 'cur_eip' and end the TB. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_0(helper_debug);
    s->is_jmp = 3;
}
2583

    
2584
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        /* interrupt inhibition (MOV SS / STI shadow) only lasts one
           instruction, so clear it at the end of this block */
        tcg_gen_helper_0_0(helper_reset_inhibit_irq);
    }
    if (s->singlestep_enabled) {
        tcg_gen_helper_0_0(helper_debug);
    } else if (s->tf) {
        /* trap flag set: single-step exception after each instruction */
        tcg_gen_helper_0_0(helper_single_step);
    } else {
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = 3;
}
2602

    
2603
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        /* TB chaining allowed: sync cc state then emit a chained jump */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2619

    
2620
/* Unconditional jump to 'eip' using chaining slot 0. */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2624

    
2625
/* Load a 64-bit value from guest address A0 into the CPUState field at
   'offset'.  'idx' is an ot+mem_index style value; the low bits encode
   the operand size, hence the >> 2 to recover the memory index. */
static inline void gen_ldq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
}
2631

    
2632
/* Store the 64-bit CPUState field at 'offset' to guest address A0.
   See gen_ldq_env_A0 for the 'idx' encoding. */
static inline void gen_stq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
}
2638

    
2639
/* Load a 128-bit (octword) value from guest address A0 into the XMM
   register at CPUState offset 'offset', as two 64-bit halves. */
static inline void gen_ldo_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
}
2648

    
2649
/* Store the 128-bit XMM register at CPUState offset 'offset' to guest
   address A0, as two 64-bit halves. */
static inline void gen_sto_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
}
2658

    
2659
/* Copy a 128-bit value between two CPUState offsets (two 64-bit moves). */
static inline void gen_op_movo(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
}
2666

    
2667
/* Copy a 64-bit value between two CPUState offsets. */
static inline void gen_op_movq(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
2672

    
2673
/* Copy a 32-bit value between two CPUState offsets. */
static inline void gen_op_movl(int d_offset, int s_offset)
{
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
}
2678

    
2679
/* Zero the 64-bit CPUState field at 'd_offset'. */
static inline void gen_op_movq_env_0(int d_offset)
{
    tcg_gen_movi_i64(cpu_tmp1_i64, 0);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
2684

    
2685
/* Sentinel table entries: SSE_SPECIAL marks opcodes handled by ad hoc
   code in gen_sse(), SSE_DUMMY marks opcodes that need no operation
   helper (femms/emms are dispatched before the table lookup). */
#define SSE_SPECIAL ((void *)1)
#define SSE_DUMMY ((void *)2)

/* Helper-pair/quad constructors for the opcode tables below:
   MMX_OP2 yields { mmx, xmm } variants, SSE_FOP yields the four
   { ps, pd, ss, sd } floating-point variants. */
#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
                     helper_ ## x ## ss, helper_ ## x ## sd, }
2691

    
2692
/* Main MMX/SSE dispatch table, indexed by [opcode byte][prefix class]
   where the prefix class is 0 = none, 1 = 0x66, 2 = 0xF3, 3 = 0xF2. */
static void *sse_op_table1[256][4] = {
    /* 3DNow! extensions */
    [0x0e] = { SSE_DUMMY }, /* femms */
    [0x0f] = { SSE_DUMMY }, /* pf... */
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
    [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { helper_ucomiss, helper_ucomisd },
    [0x2f] = { helper_comiss, helper_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
    [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
    [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
    [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
    [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
               helper_cvtss2sd, helper_cvtsd2ss },
    [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { helper_shufps, helper_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, helper_punpcklqdq_xmm },
    [0x6d] = { NULL, helper_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0x70] = { helper_pshufw_mmx,
               helper_pshufd_xmm,
               helper_pshufhw_xmm,
               helper_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
    [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2817

    
2818
/* Immediate-form shift group (0F 71/72/73): first index is
   8 * size-class (0 = word, 8 = dword, 16 = qword) + the modrm 'reg'
   opcode extension; second index selects the MMX vs XMM helper. */
static void *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, helper_psrldq_xmm }, /* byte shifts: XMM only */
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, helper_pslldq_xmm },
};
2830

    
2831
/* Scalar int<->float conversion helpers in three groups of four
   (cvtsi2*, cvtt*2si, cvt*2si); within each group the order is
   ss, sd, then the 64-bit integer variants (x86_64 only). */
static void *sse_op_table3[4 * 3] = {
    helper_cvtsi2ss,
    helper_cvtsi2sd,
    X86_64_ONLY(helper_cvtsq2ss),
    X86_64_ONLY(helper_cvtsq2sd),

    helper_cvttss2si,
    helper_cvttsd2si,
    X86_64_ONLY(helper_cvttss2sq),
    X86_64_ONLY(helper_cvttsd2sq),

    helper_cvtss2si,
    helper_cvtsd2si,
    X86_64_ONLY(helper_cvtss2sq),
    X86_64_ONLY(helper_cvtsd2sq),
};
2847

    
2848
/* CMPPS/CMPPD/CMPSS/CMPSD helpers, indexed by the 3-bit compare
   predicate immediate (eq, lt, le, unord, neq, nlt, nle, ord). */
static void *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2858

    
2859
/* 3DNow! operations, indexed by the opcode suffix byte (the immediate
   following the 0F 0F modrm encoding). */
static void *sse_op_table5[256] = {
    [0x0c] = helper_pi2fw,
    [0x0d] = helper_pi2fd,
    [0x1c] = helper_pf2iw,
    [0x1d] = helper_pf2id,
    [0x8a] = helper_pfnacc,
    [0x8e] = helper_pfpnacc,
    [0x90] = helper_pfcmpge,
    [0x94] = helper_pfmin,
    [0x96] = helper_pfrcp,
    [0x97] = helper_pfrsqrt,
    [0x9a] = helper_pfsub,
    [0x9e] = helper_pfadd,
    [0xa0] = helper_pfcmpgt,
    [0xa4] = helper_pfmax,
    [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = helper_movq, /* pfrsqit1 */
    [0xaa] = helper_pfsubr,
    [0xae] = helper_pfacc,
    [0xb0] = helper_pfcmpeq,
    [0xb4] = helper_pfmul,
    [0xb6] = helper_movq, /* pfrcpit2 */
    [0xb7] = helper_pmulhrw_mmx,
    [0xbb] = helper_pswapd,
    [0xbf] = helper_pavgb_mmx /* pavgusb */
};
2885

    
2886
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2887
{
2888
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
2889
    int modrm, mod, rm, reg, reg_addr, offset_addr;
2890
    void *sse_op2;
2891

    
2892
    b &= 0xff;
2893
    if (s->prefix & PREFIX_DATA)
2894
        b1 = 1;
2895
    else if (s->prefix & PREFIX_REPZ)
2896
        b1 = 2;
2897
    else if (s->prefix & PREFIX_REPNZ)
2898
        b1 = 3;
2899
    else
2900
        b1 = 0;
2901
    sse_op2 = sse_op_table1[b][b1];
2902
    if (!sse_op2)
2903
        goto illegal_op;
2904
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
2905
        is_xmm = 1;
2906
    } else {
2907
        if (b1 == 0) {
2908
            /* MMX case */
2909
            is_xmm = 0;
2910
        } else {
2911
            is_xmm = 1;
2912
        }
2913
    }
2914
    /* simple MMX/SSE operation */
2915
    if (s->flags & HF_TS_MASK) {
2916
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2917
        return;
2918
    }
2919
    if (s->flags & HF_EM_MASK) {
2920
    illegal_op:
2921
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2922
        return;
2923
    }
2924
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2925
        goto illegal_op;
2926
    if (b == 0x0e) {
2927
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
2928
            goto illegal_op;
2929
        /* femms */
2930
        tcg_gen_helper_0_0(helper_emms);
2931
        return;
2932
    }
2933
    if (b == 0x77) {
2934
        /* emms */
2935
        tcg_gen_helper_0_0(helper_emms);
2936
        return;
2937
    }
2938
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2939
       the static cpu state) */
2940
    if (!is_xmm) {
2941
        tcg_gen_helper_0_0(helper_enter_mmx);
2942
    }
2943

    
2944
    modrm = ldub_code(s->pc++);
2945
    reg = ((modrm >> 3) & 7);
2946
    if (is_xmm)
2947
        reg |= rex_r;
2948
    mod = (modrm >> 6) & 3;
2949
    if (sse_op2 == SSE_SPECIAL) {
2950
        b |= (b1 << 8);
2951
        switch(b) {
2952
        case 0x0e7: /* movntq */
2953
            if (mod == 3)
2954
                goto illegal_op;
2955
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2956
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2957
            break;
2958
        case 0x1e7: /* movntdq */
2959
        case 0x02b: /* movntps */
2960
        case 0x12b: /* movntps */
2961
        case 0x3f0: /* lddqu */
2962
            if (mod == 3)
2963
                goto illegal_op;
2964
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2965
            gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2966
            break;
2967
        case 0x6e: /* movd mm, ea */
2968
#ifdef TARGET_X86_64
2969
            if (s->dflag == 2) {
2970
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2971
                tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
2972
            } else
2973
#endif
2974
            {
2975
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2976
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
2977
                                 offsetof(CPUX86State,fpregs[reg].mmx));
2978
                tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
2979
            }
2980
            break;
2981
        case 0x16e: /* movd xmm, ea */
2982
#ifdef TARGET_X86_64
2983
            if (s->dflag == 2) {
2984
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2985
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
2986
                                 offsetof(CPUX86State,xmm_regs[reg]));
2987
                tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
2988
            } else
2989
#endif
2990
            {
2991
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2992
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
2993
                                 offsetof(CPUX86State,xmm_regs[reg]));
2994
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2995
                tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
2996
            }
2997
            break;
2998
        case 0x6f: /* movq mm, ea */
2999
            if (mod != 3) {
3000
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3001
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3002
            } else {
3003
                rm = (modrm & 7);
3004
                tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3005
                               offsetof(CPUX86State,fpregs[rm].mmx));
3006
                tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3007
                               offsetof(CPUX86State,fpregs[reg].mmx));
3008
            }
3009
            break;
3010
        case 0x010: /* movups */
3011
        case 0x110: /* movupd */
3012
        case 0x028: /* movaps */
3013
        case 0x128: /* movapd */
3014
        case 0x16f: /* movdqa xmm, ea */
3015
        case 0x26f: /* movdqu xmm, ea */
3016
            if (mod != 3) {
3017
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3018
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3019
            } else {
3020
                rm = (modrm & 7) | REX_B(s);
3021
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3022
                            offsetof(CPUX86State,xmm_regs[rm]));
3023
            }
3024
            break;
3025
        case 0x210: /* movss xmm, ea */
3026
            if (mod != 3) {
3027
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3028
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3029
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3030
                gen_op_movl_T0_0();
3031
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3032
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3033
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3034
            } else {
3035
                rm = (modrm & 7) | REX_B(s);
3036
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3037
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3038
            }
3039
            break;
3040
        case 0x310: /* movsd xmm, ea */
3041
            if (mod != 3) {
3042
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3043
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3044
                gen_op_movl_T0_0();
3045
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3046
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3047
            } else {
3048
                rm = (modrm & 7) | REX_B(s);
3049
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3050
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3051
            }
3052
            break;
3053
        case 0x012: /* movlps */
3054
        case 0x112: /* movlpd */
3055
            if (mod != 3) {
3056
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3057
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3058
            } else {
3059
                /* movhlps */
3060
                rm = (modrm & 7) | REX_B(s);
3061
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3062
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3063
            }
3064
            break;
3065
        case 0x212: /* movsldup */
3066
            if (mod != 3) {
3067
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3068
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3069
            } else {
3070
                rm = (modrm & 7) | REX_B(s);
3071
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3072
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3073
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3074
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3075
            }
3076
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3077
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3078
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3079
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3080
            break;
3081
        case 0x312: /* movddup */
3082
            if (mod != 3) {
3083
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3084
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3085
            } else {
3086
                rm = (modrm & 7) | REX_B(s);
3087
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3088
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3089
            }
3090
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3091
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3092
            break;
3093
        case 0x016: /* movhps */
3094
        case 0x116: /* movhpd */
3095
            if (mod != 3) {
3096
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3097
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3098
            } else {
3099
                /* movlhps */
3100
                rm = (modrm & 7) | REX_B(s);
3101
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3102
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3103
            }
3104
            break;
3105
        case 0x216: /* movshdup */
3106
            if (mod != 3) {
3107
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3108
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3109
            } else {
3110
                rm = (modrm & 7) | REX_B(s);
3111
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3112
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3113
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3114
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3115
            }
3116
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3117
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3118
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3119
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3120
            break;
3121
        case 0x7e: /* movd ea, mm */
3122
#ifdef TARGET_X86_64
3123
            if (s->dflag == 2) {
3124
                tcg_gen_ld_i64(cpu_T[0], cpu_env, 
3125
                               offsetof(CPUX86State,fpregs[reg].mmx));
3126
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3127
            } else
3128
#endif
3129
            {
3130
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, 
3131
                                 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3132
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3133
            }
3134
            break;
3135
        case 0x17e: /* movd ea, xmm */
3136
#ifdef TARGET_X86_64
3137
            if (s->dflag == 2) {
3138
                tcg_gen_ld_i64(cpu_T[0], cpu_env, 
3139
                               offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3140
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3141
            } else
3142
#endif
3143
            {
3144
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, 
3145
                                 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3146
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3147
            }
3148
            break;
3149
        case 0x27e: /* movq xmm, ea */
3150
            if (mod != 3) {
3151
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3152
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3153
            } else {
3154
                rm = (modrm & 7) | REX_B(s);
3155
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3156
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3157
            }
3158
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3159
            break;
3160
        case 0x7f: /* movq ea, mm */
3161
            if (mod != 3) {
3162
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3163
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3164
            } else {
3165
                rm = (modrm & 7);
3166
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3167
                            offsetof(CPUX86State,fpregs[reg].mmx));
3168
            }
3169
            break;
3170
        case 0x011: /* movups */
3171
        case 0x111: /* movupd */
3172
        case 0x029: /* movaps */
3173
        case 0x129: /* movapd */
3174
        case 0x17f: /* movdqa ea, xmm */
3175
        case 0x27f: /* movdqu ea, xmm */
3176
            if (mod != 3) {
3177
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3178
                gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3179
            } else {
3180
                rm = (modrm & 7) | REX_B(s);
3181
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3182
                            offsetof(CPUX86State,xmm_regs[reg]));
3183
            }
3184
            break;
3185
        case 0x211: /* movss ea, xmm */
3186
            if (mod != 3) {
3187
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3188
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3189
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
3190
            } else {
3191
                rm = (modrm & 7) | REX_B(s);
3192
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3193
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3194
            }
3195
            break;
3196
        case 0x311: /* movsd ea, xmm */
3197
            if (mod != 3) {
3198
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3199
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3200
            } else {
3201
                rm = (modrm & 7) | REX_B(s);
3202
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3203
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3204
            }
3205
            break;
3206
        case 0x013: /* movlps */
3207
        case 0x113: /* movlpd */
3208
            if (mod != 3) {
3209
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3210
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3211
            } else {
3212
                goto illegal_op;
3213
            }
3214
            break;
3215
        case 0x017: /* movhps */
3216
        case 0x117: /* movhpd */
3217
            if (mod != 3) {
3218
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3219
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3220
            } else {
3221
                goto illegal_op;
3222
            }
3223
            break;
3224
        case 0x71: /* shift mm, im */
3225
        case 0x72:
3226
        case 0x73:
3227
        case 0x171: /* shift xmm, im */
3228
        case 0x172:
3229
        case 0x173:
3230
            val = ldub_code(s->pc++);
3231
            if (is_xmm) {
3232
                gen_op_movl_T0_im(val);
3233
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3234
                gen_op_movl_T0_0();
3235
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3236
                op1_offset = offsetof(CPUX86State,xmm_t0);
3237
            } else {
3238
                gen_op_movl_T0_im(val);
3239
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3240
                gen_op_movl_T0_0();
3241
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3242
                op1_offset = offsetof(CPUX86State,mmx_t0);
3243
            }
3244
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
3245
            if (!sse_op2)
3246
                goto illegal_op;
3247
            if (is_xmm) {
3248
                rm = (modrm & 7) | REX_B(s);
3249
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3250
            } else {
3251
                rm = (modrm & 7);
3252
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3253
            }
3254
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3255
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3256
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3257
            break;
3258
        case 0x050: /* movmskps */
3259
            rm = (modrm & 7) | REX_B(s);
3260
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
3261
                             offsetof(CPUX86State,xmm_regs[rm]));
3262
            tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3263
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3264
            gen_op_mov_reg_T0(OT_LONG, reg);
3265
            break;
3266
        case 0x150: /* movmskpd */
3267
            rm = (modrm & 7) | REX_B(s);
3268
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
3269
                             offsetof(CPUX86State,xmm_regs[rm]));
3270
            tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3271
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3272
            gen_op_mov_reg_T0(OT_LONG, reg);
3273
            break;
3274
        case 0x02a: /* cvtpi2ps */
3275
        case 0x12a: /* cvtpi2pd */
3276
            tcg_gen_helper_0_0(helper_enter_mmx);
3277
            if (mod != 3) {
3278
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3279
                op2_offset = offsetof(CPUX86State,mmx_t0);
3280
                gen_ldq_env_A0(s->mem_index, op2_offset);
3281
            } else {
3282
                rm = (modrm & 7);
3283
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3284
            }
3285
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3286
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3287
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3288
            switch(b >> 8) {
3289
            case 0x0:
3290
                tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3291
                break;
3292
            default:
3293
            case 0x1:
3294
                tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3295
                break;
3296
            }
3297
            break;
3298
        case 0x22a: /* cvtsi2ss */
3299
        case 0x32a: /* cvtsi2sd */
3300
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3301
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3302
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3303
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3304
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3305
            tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3306
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
3307
            break;
3308
        case 0x02c: /* cvttps2pi */
3309
        case 0x12c: /* cvttpd2pi */
3310
        case 0x02d: /* cvtps2pi */
3311
        case 0x12d: /* cvtpd2pi */
3312
            tcg_gen_helper_0_0(helper_enter_mmx);
3313
            if (mod != 3) {
3314
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3315
                op2_offset = offsetof(CPUX86State,xmm_t0);
3316
                gen_ldo_env_A0(s->mem_index, op2_offset);
3317
            } else {
3318
                rm = (modrm & 7) | REX_B(s);
3319
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3320
            }
3321
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3322
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3323
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3324
            switch(b) {
3325
            case 0x02c:
3326
                tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3327
                break;
3328
            case 0x12c:
3329
                tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3330
                break;
3331
            case 0x02d:
3332
                tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3333
                break;
3334
            case 0x12d:
3335
                tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3336
                break;
3337
            }
3338
            break;
3339
        case 0x22c: /* cvttss2si */
3340
        case 0x32c: /* cvttsd2si */
3341
        case 0x22d: /* cvtss2si */
3342
        case 0x32d: /* cvtsd2si */
3343
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3344
            if (mod != 3) {
3345
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3346
                if ((b >> 8) & 1) {
3347
                    gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3348
                } else {
3349
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3350
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3351
                }
3352
                op2_offset = offsetof(CPUX86State,xmm_t0);
3353
            } else {
3354
                rm = (modrm & 7) | REX_B(s);
3355
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3356
            }
3357
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3358
                                    (b & 1) * 4];
3359
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3360
            if (ot == OT_LONG) {
3361
                tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
3362
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3363
            } else {
3364
                tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
3365
            }
3366
            gen_op_mov_reg_T0(ot, reg);
3367
            break;
3368
        case 0xc4: /* pinsrw */
3369
        case 0x1c4:
3370
            s->rip_offset = 1;
3371
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3372
            val = ldub_code(s->pc++);
3373
            if (b1) {
3374
                val &= 7;
3375
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
3376
                                offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3377
            } else {
3378
                val &= 3;
3379
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
3380
                                offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3381
            }
3382
            break;
3383
        case 0xc5: /* pextrw */
3384
        case 0x1c5:
3385
            if (mod != 3)
3386
                goto illegal_op;
3387
            val = ldub_code(s->pc++);
3388
            if (b1) {
3389
                val &= 7;
3390
                rm = (modrm & 7) | REX_B(s);
3391
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3392
                                 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3393
            } else {
3394
                val &= 3;
3395
                rm = (modrm & 7);
3396
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3397
                                offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3398
            }
3399
            reg = ((modrm >> 3) & 7) | rex_r;
3400
            gen_op_mov_reg_T0(OT_LONG, reg);
3401
            break;
3402
        case 0x1d6: /* movq ea, xmm */
3403
            if (mod != 3) {
3404
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3405
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3406
            } else {
3407
                rm = (modrm & 7) | REX_B(s);
3408
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3409
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3410
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3411
            }
3412
            break;
3413
        case 0x2d6: /* movq2dq */
3414
            tcg_gen_helper_0_0(helper_enter_mmx);
3415
            rm = (modrm & 7);
3416
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3417
                        offsetof(CPUX86State,fpregs[rm].mmx));
3418
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3419
            break;
3420
        case 0x3d6: /* movdq2q */
3421
            tcg_gen_helper_0_0(helper_enter_mmx);
3422
            rm = (modrm & 7) | REX_B(s);
3423
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3424
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3425
            break;
3426
        case 0xd7: /* pmovmskb */
3427
        case 0x1d7:
3428
            if (mod != 3)
3429
                goto illegal_op;
3430
            if (b1) {
3431
                rm = (modrm & 7) | REX_B(s);
3432
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
3433
                tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
3434
            } else {
3435
                rm = (modrm & 7);
3436
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
3437
                tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
3438
            }
3439
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3440
            reg = ((modrm >> 3) & 7) | rex_r;
3441
            gen_op_mov_reg_T0(OT_LONG, reg);
3442
            break;
3443
        default:
3444
            goto illegal_op;
3445
        }
3446
    } else {
3447
        /* generic MMX or SSE operation */
3448
        switch(b) {
3449
        case 0x70: /* pshufx insn */
3450
        case 0xc6: /* pshufx insn */
3451
        case 0xc2: /* compare insns */
3452
            s->rip_offset = 1;
3453
            break;
3454
        default:
3455
            break;
3456
        }
3457
        if (is_xmm) {
3458
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3459
            if (mod != 3) {
3460
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3461
                op2_offset = offsetof(CPUX86State,xmm_t0);
3462
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3463
                                b == 0xc2)) {
3464
                    /* specific case for SSE single instructions */
3465
                    if (b1 == 2) {
3466
                        /* 32 bit access */
3467
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3468
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3469
                    } else {
3470
                        /* 64 bit access */
3471
                        gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3472
                    }
3473
                } else {
3474
                    gen_ldo_env_A0(s->mem_index, op2_offset);
3475
                }
3476
            } else {
3477
                rm = (modrm & 7) | REX_B(s);
3478
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3479
            }
3480
        } else {
3481
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3482
            if (mod != 3) {
3483
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3484
                op2_offset = offsetof(CPUX86State,mmx_t0);
3485
                gen_ldq_env_A0(s->mem_index, op2_offset);
3486
            } else {
3487
                rm = (modrm & 7);
3488
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3489
            }
3490
        }
3491
        switch(b) {
3492
        case 0x0f: /* 3DNow! data insns */
3493
            if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3494
                goto illegal_op;
3495
            val = ldub_code(s->pc++);
3496
            sse_op2 = sse_op_table5[val];
3497
            if (!sse_op2)
3498
                goto illegal_op;
3499
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3500
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3501
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3502
            break;
3503
        case 0x70: /* pshufx insn */
3504
        case 0xc6: /* pshufx insn */
3505
            val = ldub_code(s->pc++);
3506
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3507
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3508
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3509
            break;
3510
        case 0xc2:
3511
            /* compare insns */
3512
            val = ldub_code(s->pc++);
3513
            if (val >= 8)
3514
                goto illegal_op;
3515
            sse_op2 = sse_op_table4[val][b1];
3516
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3517
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3518
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3519
            break;
3520
        case 0xf7:
3521
            /* maskmov : we must prepare A0 */
3522
            if (mod != 3)
3523
                goto illegal_op;
3524
#ifdef TARGET_X86_64
3525
            if (s->aflag == 2) {
3526
                gen_op_movq_A0_reg(R_EDI);
3527
            } else
3528
#endif
3529
            {
3530
                gen_op_movl_A0_reg(R_EDI);
3531
                if (s->aflag == 0)
3532
                    gen_op_andl_A0_ffff();
3533
            }
3534
            gen_add_A0_ds_seg(s);
3535

    
3536
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3537
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3538
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
3539
            break;
3540
        default:
3541
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3542
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3543
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3544
            break;
3545
        }
3546
        if (b == 0x2e || b == 0x2f) {
3547
            /* just to keep the EFLAGS optimization correct */
3548
            gen_op_com_dummy();
3549
            s->cc_op = CC_OP_EFLAGS;
3550
        }
3551
    }
3552
}
3553

    
3554
/* convert one instruction. s->is_jmp is set if the translation must
3555
   be stopped. Return the next pc value */
3556
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3557
{
3558
    int b, prefixes, aflag, dflag;
3559
    int shift, ot;
3560
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3561
    target_ulong next_eip, tval;
3562
    int rex_w, rex_r;
3563

    
3564
    s->pc = pc_start;
3565
    prefixes = 0;
3566
    aflag = s->code32;
3567
    dflag = s->code32;
3568
    s->override = -1;
3569
    rex_w = -1;
3570
    rex_r = 0;
3571
#ifdef TARGET_X86_64
3572
    s->rex_x = 0;
3573
    s->rex_b = 0;
3574
    x86_64_hregs = 0;
3575
#endif
3576
    s->rip_offset = 0; /* for relative ip address */
3577
 next_byte:
3578
    b = ldub_code(s->pc);
3579
    s->pc++;
3580
    /* check prefixes */
3581
#ifdef TARGET_X86_64
3582
    if (CODE64(s)) {
3583
        switch (b) {
3584
        case 0xf3:
3585
            prefixes |= PREFIX_REPZ;
3586
            goto next_byte;
3587
        case 0xf2:
3588
            prefixes |= PREFIX_REPNZ;
3589
            goto next_byte;
3590
        case 0xf0:
3591
            prefixes |= PREFIX_LOCK;
3592
            goto next_byte;
3593
        case 0x2e:
3594
            s->override = R_CS;
3595
            goto next_byte;
3596
        case 0x36:
3597
            s->override = R_SS;
3598
            goto next_byte;
3599
        case 0x3e:
3600
            s->override = R_DS;
3601
            goto next_byte;
3602
        case 0x26:
3603
            s->override = R_ES;
3604
            goto next_byte;
3605
        case 0x64:
3606
            s->override = R_FS;
3607
            goto next_byte;
3608
        case 0x65:
3609
            s->override = R_GS;
3610
            goto next_byte;
3611
        case 0x66:
3612
            prefixes |= PREFIX_DATA;
3613
            goto next_byte;
3614
        case 0x67:
3615
            prefixes |= PREFIX_ADR;
3616
            goto next_byte;
3617
        case 0x40 ... 0x4f:
3618
            /* REX prefix */
3619
            rex_w = (b >> 3) & 1;
3620
            rex_r = (b & 0x4) << 1;
3621
            s->rex_x = (b & 0x2) << 2;
3622
            REX_B(s) = (b & 0x1) << 3;
3623
            x86_64_hregs = 1; /* select uniform byte register addressing */
3624
            goto next_byte;
3625
        }
3626
        if (rex_w == 1) {
3627
            /* 0x66 is ignored if rex.w is set */
3628
            dflag = 2;
3629
        } else {
3630
            if (prefixes & PREFIX_DATA)
3631
                dflag ^= 1;
3632
        }
3633
        if (!(prefixes & PREFIX_ADR))
3634
            aflag = 2;
3635
    } else
3636
#endif
3637
    {
3638
        switch (b) {
3639
        case 0xf3:
3640
            prefixes |= PREFIX_REPZ;
3641
            goto next_byte;
3642
        case 0xf2:
3643
            prefixes |= PREFIX_REPNZ;
3644
            goto next_byte;
3645
        case 0xf0:
3646
            prefixes |= PREFIX_LOCK;
3647
            goto next_byte;
3648
        case 0x2e:
3649
            s->override = R_CS;
3650
            goto next_byte;
3651
        case 0x36:
3652
            s->override = R_SS;
3653
            goto next_byte;
3654
        case 0x3e:
3655
            s->override = R_DS;
3656
            goto next_byte;
3657
        case 0x26:
3658
            s->override = R_ES;
3659
            goto next_byte;
3660
        case 0x64:
3661
            s->override = R_FS;
3662
            goto next_byte;
3663
        case 0x65:
3664
            s->override = R_GS;
3665
            goto next_byte;
3666
        case 0x66:
3667
            prefixes |= PREFIX_DATA;
3668
            goto next_byte;
3669
        case 0x67:
3670
            prefixes |= PREFIX_ADR;
3671
            goto next_byte;
3672
        }
3673
        if (prefixes & PREFIX_DATA)
3674
            dflag ^= 1;
3675
        if (prefixes & PREFIX_ADR)
3676
            aflag ^= 1;
3677
    }
3678

    
3679
    s->prefix = prefixes;
3680
    s->aflag = aflag;
3681
    s->dflag = dflag;
3682

    
3683
    /* lock generation */
3684
    if (prefixes & PREFIX_LOCK)
3685
        tcg_gen_helper_0_0(helper_lock);
3686

    
3687
    /* now check op code */
3688
 reswitch:
3689
    switch(b) {
3690
    case 0x0f:
3691
        /**************************/
3692
        /* extended op code */
3693
        b = ldub_code(s->pc++) | 0x100;
3694
        goto reswitch;
3695

    
3696
        /**************************/
3697
        /* arith & logic */
3698
    case 0x00 ... 0x05:
3699
    case 0x08 ... 0x0d:
3700
    case 0x10 ... 0x15:
3701
    case 0x18 ... 0x1d:
3702
    case 0x20 ... 0x25:
3703
    case 0x28 ... 0x2d:
3704
    case 0x30 ... 0x35:
3705
    case 0x38 ... 0x3d:
3706
        {
3707
            int op, f, val;
3708
            op = (b >> 3) & 7;
3709
            f = (b >> 1) & 3;
3710

    
3711
            if ((b & 1) == 0)
3712
                ot = OT_BYTE;
3713
            else
3714
                ot = dflag + OT_WORD;
3715

    
3716
            switch(f) {
3717
            case 0: /* OP Ev, Gv */
3718
                modrm = ldub_code(s->pc++);
3719
                reg = ((modrm >> 3) & 7) | rex_r;
3720
                mod = (modrm >> 6) & 3;
3721
                rm = (modrm & 7) | REX_B(s);
3722
                if (mod != 3) {
3723
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3724
                    opreg = OR_TMP0;
3725
                } else if (op == OP_XORL && rm == reg) {
3726
                xor_zero:
3727
                    /* xor reg, reg optimisation */
3728
                    gen_op_movl_T0_0();
3729
                    s->cc_op = CC_OP_LOGICB + ot;
3730
                    gen_op_mov_reg_T0(ot, reg);
3731
                    gen_op_update1_cc();
3732
                    break;
3733
                } else {
3734
                    opreg = rm;
3735
                }
3736
                gen_op_mov_TN_reg(ot, 1, reg);
3737
                gen_op(s, op, ot, opreg);
3738
                break;
3739
            case 1: /* OP Gv, Ev */
3740
                modrm = ldub_code(s->pc++);
3741
                mod = (modrm >> 6) & 3;
3742
                reg = ((modrm >> 3) & 7) | rex_r;
3743
                rm = (modrm & 7) | REX_B(s);
3744
                if (mod != 3) {
3745
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3746
                    gen_op_ld_T1_A0(ot + s->mem_index);
3747
                } else if (op == OP_XORL && rm == reg) {
3748
                    goto xor_zero;
3749
                } else {
3750
                    gen_op_mov_TN_reg(ot, 1, rm);
3751
                }
3752
                gen_op(s, op, ot, reg);
3753
                break;
3754
            case 2: /* OP A, Iv */
3755
                val = insn_get(s, ot);
3756
                gen_op_movl_T1_im(val);
3757
                gen_op(s, op, ot, OR_EAX);
3758
                break;
3759
            }
3760
        }
3761
        break;
3762

    
3763
    case 0x80: /* GRP1 */
3764
    case 0x81:
3765
    case 0x82:
3766
    case 0x83:
3767
        {
3768
            int val;
3769

    
3770
            if ((b & 1) == 0)
3771
                ot = OT_BYTE;
3772
            else
3773
                ot = dflag + OT_WORD;
3774

    
3775
            modrm = ldub_code(s->pc++);
3776
            mod = (modrm >> 6) & 3;
3777
            rm = (modrm & 7) | REX_B(s);
3778
            op = (modrm >> 3) & 7;
3779

    
3780
            if (mod != 3) {
3781
                if (b == 0x83)
3782
                    s->rip_offset = 1;
3783
                else
3784
                    s->rip_offset = insn_const_size(ot);
3785
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3786
                opreg = OR_TMP0;
3787
            } else {
3788
                opreg = rm;
3789
            }
3790

    
3791
            switch(b) {
3792
            default:
3793
            case 0x80:
3794
            case 0x81:
3795
            case 0x82:
3796
                val = insn_get(s, ot);
3797
                break;
3798
            case 0x83:
3799
                val = (int8_t)insn_get(s, OT_BYTE);
3800
                break;
3801
            }
3802
            gen_op_movl_T1_im(val);
3803
            gen_op(s, op, ot, opreg);
3804
        }
3805
        break;
3806

    
3807
        /**************************/
3808
        /* inc, dec, and other misc arith */
3809
    case 0x40 ... 0x47: /* inc Gv */
3810
        ot = dflag ? OT_LONG : OT_WORD;
3811
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
3812
        break;
3813
    case 0x48 ... 0x4f: /* dec Gv */
3814
        ot = dflag ? OT_LONG : OT_WORD;
3815
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
3816
        break;
3817
    case 0xf6: /* GRP3 */
3818
    case 0xf7:
3819
        if ((b & 1) == 0)
3820
            ot = OT_BYTE;
3821
        else
3822
            ot = dflag + OT_WORD;
3823

    
3824
        modrm = ldub_code(s->pc++);
3825
        mod = (modrm >> 6) & 3;
3826
        rm = (modrm & 7) | REX_B(s);
3827
        op = (modrm >> 3) & 7;
3828
        if (mod != 3) {
3829
            if (op == 0)
3830
                s->rip_offset = insn_const_size(ot);
3831
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3832
            gen_op_ld_T0_A0(ot + s->mem_index);
3833
        } else {
3834
            gen_op_mov_TN_reg(ot, 0, rm);
3835
        }
3836

    
3837
        switch(op) {
3838
        case 0: /* test */
3839
            val = insn_get(s, ot);
3840
            gen_op_movl_T1_im(val);
3841
            gen_op_testl_T0_T1_cc();
3842
            s->cc_op = CC_OP_LOGICB + ot;
3843
            break;
3844
        case 2: /* not */
3845
            tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
3846
            if (mod != 3) {
3847
                gen_op_st_T0_A0(ot + s->mem_index);
3848
            } else {
3849
                gen_op_mov_reg_T0(ot, rm);
3850
            }
3851
            break;
3852
        case 3: /* neg */
3853
            tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
3854
            if (mod != 3) {
3855
                gen_op_st_T0_A0(ot + s->mem_index);
3856
            } else {
3857
                gen_op_mov_reg_T0(ot, rm);
3858
            }
3859
            gen_op_update_neg_cc();
3860
            s->cc_op = CC_OP_SUBB + ot;
3861
            break;
3862
        case 4: /* mul */
3863
            switch(ot) {
3864
            case OT_BYTE:
3865
                gen_op_mulb_AL_T0();
3866
                s->cc_op = CC_OP_MULB;
3867
                break;
3868
            case OT_WORD:
3869
                gen_op_mulw_AX_T0();
3870
                s->cc_op = CC_OP_MULW;
3871
                break;
3872
            default:
3873
            case OT_LONG:
3874
                gen_op_mull_EAX_T0();
3875
                s->cc_op = CC_OP_MULL;
3876
                break;
3877
#ifdef TARGET_X86_64
3878
            case OT_QUAD:
3879
                gen_op_mulq_EAX_T0();
3880
                s->cc_op = CC_OP_MULQ;
3881
                break;
3882
#endif
3883
            }
3884
            break;
3885
        case 5: /* imul */
3886
            switch(ot) {
3887
            case OT_BYTE:
3888
                gen_op_imulb_AL_T0();
3889
                s->cc_op = CC_OP_MULB;
3890
                break;
3891
            case OT_WORD:
3892
                gen_op_imulw_AX_T0();
3893
                s->cc_op = CC_OP_MULW;
3894
                break;
3895
            default:
3896
            case OT_LONG:
3897
                gen_op_imull_EAX_T0();
3898
                s->cc_op = CC_OP_MULL;
3899
                break;
3900
#ifdef TARGET_X86_64
3901
            case OT_QUAD:
3902
                gen_op_imulq_EAX_T0();
3903
                s->cc_op = CC_OP_MULQ;
3904
                break;
3905
#endif
3906
            }
3907
            break;
3908
        case 6: /* div */
3909
            switch(ot) {
3910
            case OT_BYTE:
3911
                gen_jmp_im(pc_start - s->cs_base);
3912
                tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
3913
                break;
3914
            case OT_WORD:
3915
                gen_jmp_im(pc_start - s->cs_base);
3916
                tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
3917
                break;
3918
            default:
3919
            case OT_LONG:
3920
                gen_jmp_im(pc_start - s->cs_base);
3921
                tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
3922
                break;
3923
#ifdef TARGET_X86_64
3924
            case OT_QUAD:
3925
                gen_jmp_im(pc_start - s->cs_base);
3926
                tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
3927
                break;
3928
#endif
3929
            }
3930
            break;
3931
        case 7: /* idiv */
3932
            switch(ot) {
3933
            case OT_BYTE:
3934
                gen_jmp_im(pc_start - s->cs_base);
3935
                tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
3936
                break;
3937
            case OT_WORD:
3938
                gen_jmp_im(pc_start - s->cs_base);
3939
                tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
3940
                break;
3941
            default:
3942
            case OT_LONG:
3943
                gen_jmp_im(pc_start - s->cs_base);
3944
                tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
3945
                break;
3946
#ifdef TARGET_X86_64
3947
            case OT_QUAD:
3948
                gen_jmp_im(pc_start - s->cs_base);
3949
                tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
3950
                break;
3951
#endif
3952
            }
3953
            break;
3954
        default:
3955
            goto illegal_op;
3956
        }
3957
        break;
3958

    
3959
    case 0xfe: /* GRP4 */
3960
    case 0xff: /* GRP5 */
3961
        if ((b & 1) == 0)
3962
            ot = OT_BYTE;
3963
        else
3964
            ot = dflag + OT_WORD;
3965

    
3966
        modrm = ldub_code(s->pc++);
3967
        mod = (modrm >> 6) & 3;
3968
        rm = (modrm & 7) | REX_B(s);
3969
        op = (modrm >> 3) & 7;
3970
        if (op >= 2 && b == 0xfe) {
3971
            goto illegal_op;
3972
        }
3973
        if (CODE64(s)) {
3974
            if (op == 2 || op == 4) {
3975
                /* operand size for jumps is 64 bit */
3976
                ot = OT_QUAD;
3977
            } else if (op == 3 || op == 5) {
3978
                /* for call calls, the operand is 16 or 32 bit, even
3979
                   in long mode */
3980
                ot = dflag ? OT_LONG : OT_WORD;
3981
            } else if (op == 6) {
3982
                /* default push size is 64 bit */
3983
                ot = dflag ? OT_QUAD : OT_WORD;
3984
            }
3985
        }
3986
        if (mod != 3) {
3987
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3988
            if (op >= 2 && op != 3 && op != 5)
3989
                gen_op_ld_T0_A0(ot + s->mem_index);
3990
        } else {
3991
            gen_op_mov_TN_reg(ot, 0, rm);
3992
        }
3993

    
3994
        switch(op) {
3995
        case 0: /* inc Ev */
3996
            if (mod != 3)
3997
                opreg = OR_TMP0;
3998
            else
3999
                opreg = rm;
4000
            gen_inc(s, ot, opreg, 1);
4001
            break;
4002
        case 1: /* dec Ev */
4003
            if (mod != 3)
4004
                opreg = OR_TMP0;
4005
            else
4006
                opreg = rm;
4007
            gen_inc(s, ot, opreg, -1);
4008
            break;
4009
        case 2: /* call Ev */
4010
            /* XXX: optimize if memory (no 'and' is necessary) */
4011
            if (s->dflag == 0)
4012
                gen_op_andl_T0_ffff();
4013
            next_eip = s->pc - s->cs_base;
4014
            gen_movtl_T1_im(next_eip);
4015
            gen_push_T1(s);
4016
            gen_op_jmp_T0();
4017
            gen_eob(s);
4018
            break;
4019
        case 3: /* lcall Ev */
4020
            gen_op_ld_T1_A0(ot + s->mem_index);
4021
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4022
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4023
        do_lcall:
4024
            if (s->pe && !s->vm86) {
4025
                if (s->cc_op != CC_OP_DYNAMIC)
4026
                    gen_op_set_cc_op(s->cc_op);
4027
                gen_jmp_im(pc_start - s->cs_base);
4028
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4029
                tcg_gen_helper_0_4(helper_lcall_protected,
4030
                                   cpu_tmp2_i32, cpu_T[1],
4031
                                   tcg_const_i32(dflag), 
4032
                                   tcg_const_i32(s->pc - pc_start));
4033
            } else {
4034
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4035
                tcg_gen_helper_0_4(helper_lcall_real,
4036
                                   cpu_tmp2_i32, cpu_T[1],
4037
                                   tcg_const_i32(dflag), 
4038
                                   tcg_const_i32(s->pc - s->cs_base));
4039
            }
4040
            gen_eob(s);
4041
            break;
4042
        case 4: /* jmp Ev */
4043
            if (s->dflag == 0)
4044
                gen_op_andl_T0_ffff();
4045
            gen_op_jmp_T0();
4046
            gen_eob(s);
4047
            break;
4048
        case 5: /* ljmp Ev */
4049
            gen_op_ld_T1_A0(ot + s->mem_index);
4050
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4051
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4052
        do_ljmp:
4053
            if (s->pe && !s->vm86) {
4054
                if (s->cc_op != CC_OP_DYNAMIC)
4055
                    gen_op_set_cc_op(s->cc_op);
4056
                gen_jmp_im(pc_start - s->cs_base);
4057
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4058
                tcg_gen_helper_0_3(helper_ljmp_protected,
4059
                                   cpu_tmp2_i32,
4060
                                   cpu_T[1],
4061
                                   tcg_const_i32(s->pc - pc_start));
4062
            } else {
4063
                gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
4064
                gen_op_movl_T0_T1();
4065
                gen_op_jmp_T0();
4066
            }
4067
            gen_eob(s);
4068
            break;
4069
        case 6: /* push Ev */
4070
            gen_push_T0(s);
4071
            break;
4072
        default:
4073
            goto illegal_op;
4074
        }
4075
        break;
4076

    
4077
    case 0x84: /* test Ev, Gv */
4078
    case 0x85:
4079
        if ((b & 1) == 0)
4080
            ot = OT_BYTE;
4081
        else
4082
            ot = dflag + OT_WORD;
4083

    
4084
        modrm = ldub_code(s->pc++);
4085
        mod = (modrm >> 6) & 3;
4086
        rm = (modrm & 7) | REX_B(s);
4087
        reg = ((modrm >> 3) & 7) | rex_r;
4088

    
4089
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4090
        gen_op_mov_TN_reg(ot, 1, reg);
4091
        gen_op_testl_T0_T1_cc();
4092
        s->cc_op = CC_OP_LOGICB + ot;
4093
        break;
4094

    
4095
    case 0xa8: /* test eAX, Iv */
4096
    case 0xa9:
4097
        if ((b & 1) == 0)
4098
            ot = OT_BYTE;
4099
        else
4100
            ot = dflag + OT_WORD;
4101
        val = insn_get(s, ot);
4102

    
4103
        gen_op_mov_TN_reg(ot, 0, OR_EAX);
4104
        gen_op_movl_T1_im(val);
4105
        gen_op_testl_T0_T1_cc();
4106
        s->cc_op = CC_OP_LOGICB + ot;
4107
        break;
4108

    
4109
    case 0x98: /* CWDE/CBW */
4110
#ifdef TARGET_X86_64
4111
        if (dflag == 2) {
4112
            gen_op_movslq_RAX_EAX();
4113
        } else
4114
#endif
4115
        if (dflag == 1)
4116
            gen_op_movswl_EAX_AX();
4117
        else
4118
            gen_op_movsbw_AX_AL();
4119
        break;
4120
    case 0x99: /* CDQ/CWD */
4121
#ifdef TARGET_X86_64
4122
        if (dflag == 2) {
4123
            gen_op_movsqo_RDX_RAX();
4124
        } else
4125
#endif
4126
        if (dflag == 1)
4127
            gen_op_movslq_EDX_EAX();
4128
        else
4129
            gen_op_movswl_DX_AX();
4130
        break;
4131
    case 0x1af: /* imul Gv, Ev */
4132
    case 0x69: /* imul Gv, Ev, I */
4133
    case 0x6b:
4134
        ot = dflag + OT_WORD;
4135
        modrm = ldub_code(s->pc++);
4136
        reg = ((modrm >> 3) & 7) | rex_r;
4137
        if (b == 0x69)
4138
            s->rip_offset = insn_const_size(ot);
4139
        else if (b == 0x6b)
4140
            s->rip_offset = 1;
4141
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4142
        if (b == 0x69) {
4143
            val = insn_get(s, ot);
4144
            gen_op_movl_T1_im(val);
4145
        } else if (b == 0x6b) {
4146
            val = (int8_t)insn_get(s, OT_BYTE);
4147
            gen_op_movl_T1_im(val);
4148
        } else {
4149
            gen_op_mov_TN_reg(ot, 1, reg);
4150
        }
4151

    
4152
#ifdef TARGET_X86_64
4153
        if (ot == OT_QUAD) {
4154
            gen_op_imulq_T0_T1();
4155
        } else
4156
#endif
4157
        if (ot == OT_LONG) {
4158
            gen_op_imull_T0_T1();
4159
        } else {
4160
            gen_op_imulw_T0_T1();
4161
        }
4162
        gen_op_mov_reg_T0(ot, reg);
4163
        s->cc_op = CC_OP_MULB + ot;
4164
        break;
4165
    case 0x1c0:
4166
    case 0x1c1: /* xadd Ev, Gv */
4167
        if ((b & 1) == 0)
4168
            ot = OT_BYTE;
4169
        else
4170
            ot = dflag + OT_WORD;
4171
        modrm = ldub_code(s->pc++);
4172
        reg = ((modrm >> 3) & 7) | rex_r;
4173
        mod = (modrm >> 6) & 3;
4174
        if (mod == 3) {
4175
            rm = (modrm & 7) | REX_B(s);
4176
            gen_op_mov_TN_reg(ot, 0, reg);
4177
            gen_op_mov_TN_reg(ot, 1, rm);
4178
            gen_op_addl_T0_T1();
4179
            gen_op_mov_reg_T1(ot, reg);
4180
            gen_op_mov_reg_T0(ot, rm);
4181
        } else {
4182
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4183
            gen_op_mov_TN_reg(ot, 0, reg);
4184
            gen_op_ld_T1_A0(ot + s->mem_index);
4185
            gen_op_addl_T0_T1();
4186
            gen_op_st_T0_A0(ot + s->mem_index);
4187
            gen_op_mov_reg_T1(ot, reg);
4188
        }
4189
        gen_op_update2_cc();
4190
        s->cc_op = CC_OP_ADDB + ot;
4191
        break;
4192
    case 0x1b0:
4193
    case 0x1b1: /* cmpxchg Ev, Gv */
4194
        {
4195
            int label1;
4196

    
4197
            if ((b & 1) == 0)
4198
                ot = OT_BYTE;
4199
            else
4200
                ot = dflag + OT_WORD;
4201
            modrm = ldub_code(s->pc++);
4202
            reg = ((modrm >> 3) & 7) | rex_r;
4203
            mod = (modrm >> 6) & 3;
4204
            gen_op_mov_TN_reg(ot, 1, reg);
4205
            if (mod == 3) {
4206
                rm = (modrm & 7) | REX_B(s);
4207
                gen_op_mov_TN_reg(ot, 0, rm);
4208
            } else {
4209
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4210
                gen_op_ld_T0_A0(ot + s->mem_index);
4211
                rm = 0; /* avoid warning */
4212
            }
4213
            label1 = gen_new_label();
4214
            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_EAX]));
4215
            tcg_gen_sub_tl(cpu_T3, cpu_T3, cpu_T[0]);
4216
            gen_extu(ot, cpu_T3);
4217
            tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);
4218
            tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
4219
            gen_op_mov_reg_T0(ot, R_EAX);
4220
            gen_set_label(label1);
4221
            if (mod == 3) {
4222
                gen_op_mov_reg_T1(ot, rm);
4223
            } else {
4224
                gen_op_st_T1_A0(ot + s->mem_index);
4225
            }
4226
            tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4227
            tcg_gen_mov_tl(cpu_cc_dst, cpu_T3);
4228
            s->cc_op = CC_OP_SUBB + ot;
4229
        }
4230
        break;
4231
    case 0x1c7: /* cmpxchg8b */
4232
        modrm = ldub_code(s->pc++);
4233
        mod = (modrm >> 6) & 3;
4234
        if ((mod == 3) || ((modrm & 0x38) != 0x8))
4235
            goto illegal_op;
4236
        gen_jmp_im(pc_start - s->cs_base);
4237
        if (s->cc_op != CC_OP_DYNAMIC)
4238
            gen_op_set_cc_op(s->cc_op);
4239
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4240
        gen_op_cmpxchg8b();
4241
        s->cc_op = CC_OP_EFLAGS;
4242
        break;
4243

    
4244
        /**************************/
4245
        /* push/pop */
4246
    case 0x50 ... 0x57: /* push */
4247
        gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
4248
        gen_push_T0(s);
4249
        break;
4250
    case 0x58 ... 0x5f: /* pop */
4251
        if (CODE64(s)) {
4252
            ot = dflag ? OT_QUAD : OT_WORD;
4253
        } else {
4254
            ot = dflag + OT_WORD;
4255
        }
4256
        gen_pop_T0(s);
4257
        /* NOTE: order is important for pop %sp */
4258
        gen_pop_update(s);
4259
        gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
4260
        break;
4261
    case 0x60: /* pusha */
4262
        if (CODE64(s))
4263
            goto illegal_op;
4264
        gen_pusha(s);
4265
        break;
4266
    case 0x61: /* popa */
4267
        if (CODE64(s))
4268
            goto illegal_op;
4269
        gen_popa(s);
4270
        break;
4271
    case 0x68: /* push Iv */
4272
    case 0x6a:
4273
        if (CODE64(s)) {
4274
            ot = dflag ? OT_QUAD : OT_WORD;
4275
        } else {
4276
            ot = dflag + OT_WORD;
4277
        }
4278
        if (b == 0x68)
4279
            val = insn_get(s, ot);
4280
        else
4281
            val = (int8_t)insn_get(s, OT_BYTE);
4282
        gen_op_movl_T0_im(val);
4283
        gen_push_T0(s);
4284
        break;
4285
    case 0x8f: /* pop Ev */
4286
        if (CODE64(s)) {
4287
            ot = dflag ? OT_QUAD : OT_WORD;
4288
        } else {
4289
            ot = dflag + OT_WORD;
4290
        }
4291
        modrm = ldub_code(s->pc++);
4292
        mod = (modrm >> 6) & 3;
4293
        gen_pop_T0(s);
4294
        if (mod == 3) {
4295
            /* NOTE: order is important for pop %sp */
4296
            gen_pop_update(s);
4297
            rm = (modrm & 7) | REX_B(s);
4298
            gen_op_mov_reg_T0(ot, rm);
4299
        } else {
4300
            /* NOTE: order is important too for MMU exceptions */
4301
            s->popl_esp_hack = 1 << ot;
4302
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4303
            s->popl_esp_hack = 0;
4304
            gen_pop_update(s);
4305
        }
4306
        break;
4307
    case 0xc8: /* enter */
4308
        {
4309
            int level;
4310
            val = lduw_code(s->pc);
4311
            s->pc += 2;
4312
            level = ldub_code(s->pc++);
4313
            gen_enter(s, val, level);
4314
        }
4315
        break;
4316
    case 0xc9: /* leave */
4317
        /* XXX: exception not precise (ESP is updated before potential exception) */
4318
        if (CODE64(s)) {
4319
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4320
            gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4321
        } else if (s->ss32) {
4322
            gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4323
            gen_op_mov_reg_T0(OT_LONG, R_ESP);
4324
        } else {
4325
            gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4326
            gen_op_mov_reg_T0(OT_WORD, R_ESP);
4327
        }
4328
        gen_pop_T0(s);
4329
        if (CODE64(s)) {
4330
            ot = dflag ? OT_QUAD : OT_WORD;
4331
        } else {
4332
            ot = dflag + OT_WORD;
4333
        }
4334
        gen_op_mov_reg_T0(ot, R_EBP);
4335
        gen_pop_update(s);
4336
        break;
4337
    case 0x06: /* push es */
4338
    case 0x0e: /* push cs */
4339
    case 0x16: /* push ss */
4340
    case 0x1e: /* push ds */
4341
        if (CODE64(s))
4342
            goto illegal_op;
4343
        gen_op_movl_T0_seg(b >> 3);
4344
        gen_push_T0(s);
4345
        break;
4346
    case 0x1a0: /* push fs */
4347
    case 0x1a8: /* push gs */
4348
        gen_op_movl_T0_seg((b >> 3) & 7);
4349
        gen_push_T0(s);
4350
        break;
4351
    case 0x07: /* pop es */
4352
    case 0x17: /* pop ss */
4353
    case 0x1f: /* pop ds */
4354
        if (CODE64(s))
4355
            goto illegal_op;
4356
        reg = b >> 3;
4357
        gen_pop_T0(s);
4358
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4359
        gen_pop_update(s);
4360
        if (reg == R_SS) {
4361
            /* if reg == SS, inhibit interrupts/trace. */
4362
            /* If several instructions disable interrupts, only the
4363
               _first_ does it */
4364
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4365
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
4366
            s->tf = 0;
4367
        }
4368
        if (s->is_jmp) {
4369
            gen_jmp_im(s->pc - s->cs_base);
4370
            gen_eob(s);
4371
        }
4372
        break;
4373
    case 0x1a1: /* pop fs */
4374
    case 0x1a9: /* pop gs */
4375
        gen_pop_T0(s);
4376
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4377
        gen_pop_update(s);
4378
        if (s->is_jmp) {
4379
            gen_jmp_im(s->pc - s->cs_base);
4380
            gen_eob(s);
4381
        }
4382
        break;
4383

    
4384
        /**************************/
4385
        /* mov */
4386
    case 0x88:
4387
    case 0x89: /* mov Gv, Ev */
4388
        if ((b & 1) == 0)
4389
            ot = OT_BYTE;
4390
        else
4391
            ot = dflag + OT_WORD;
4392
        modrm = ldub_code(s->pc++);
4393
        reg = ((modrm >> 3) & 7) | rex_r;
4394

    
4395
        /* generate a generic store */
4396
        gen_ldst_modrm(s, modrm, ot, reg, 1);
4397
        break;
4398
    case 0xc6:
4399
    case 0xc7: /* mov Ev, Iv */
4400
        if ((b & 1) == 0)
4401
            ot = OT_BYTE;
4402
        else
4403
            ot = dflag + OT_WORD;
4404
        modrm = ldub_code(s->pc++);
4405
        mod = (modrm >> 6) & 3;
4406
        if (mod != 3) {
4407
            s->rip_offset = insn_const_size(ot);
4408
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4409
        }
4410
        val = insn_get(s, ot);
4411
        gen_op_movl_T0_im(val);
4412
        if (mod != 3)
4413
            gen_op_st_T0_A0(ot + s->mem_index);
4414
        else
4415
            gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4416
        break;
4417
    case 0x8a:
4418
    case 0x8b: /* mov Ev, Gv */
4419
        if ((b & 1) == 0)
4420
            ot = OT_BYTE;
4421
        else
4422
            ot = OT_WORD + dflag;
4423
        modrm = ldub_code(s->pc++);
4424
        reg = ((modrm >> 3) & 7) | rex_r;
4425

    
4426
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4427
        gen_op_mov_reg_T0(ot, reg);
4428
        break;
4429
    case 0x8e: /* mov seg, Gv */
4430
        modrm = ldub_code(s->pc++);
4431
        reg = (modrm >> 3) & 7;
4432
        if (reg >= 6 || reg == R_CS)
4433
            goto illegal_op;
4434
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4435
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4436
        if (reg == R_SS) {
4437
            /* if reg == SS, inhibit interrupts/trace */
4438
            /* If several instructions disable interrupts, only the
4439
               _first_ does it */
4440
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4441
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
4442
            s->tf = 0;
4443
        }
4444
        if (s->is_jmp) {
4445
            gen_jmp_im(s->pc - s->cs_base);
4446
            gen_eob(s);
4447
        }
4448
        break;
4449
    case 0x8c: /* mov Gv, seg */
4450
        modrm = ldub_code(s->pc++);
4451
        reg = (modrm >> 3) & 7;
4452
        mod = (modrm >> 6) & 3;
4453
        if (reg >= 6)
4454
            goto illegal_op;
4455
        gen_op_movl_T0_seg(reg);
4456
        if (mod == 3)
4457
            ot = OT_WORD + dflag;
4458
        else
4459
            ot = OT_WORD;
4460
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4461
        break;
4462

    
4463
    case 0x1b6: /* movzbS Gv, Eb */
4464
    case 0x1b7: /* movzwS Gv, Eb */
4465
    case 0x1be: /* movsbS Gv, Eb */
4466
    case 0x1bf: /* movswS Gv, Eb */
4467
        {
4468
            int d_ot;
4469
            /* d_ot is the size of destination */
4470
            d_ot = dflag + OT_WORD;
4471
            /* ot is the size of source */
4472
            ot = (b & 1) + OT_BYTE;
4473
            modrm = ldub_code(s->pc++);
4474
            reg = ((modrm >> 3) & 7) | rex_r;
4475
            mod = (modrm >> 6) & 3;
4476
            rm = (modrm & 7) | REX_B(s);
4477

    
4478
            if (mod == 3) {
4479
                gen_op_mov_TN_reg(ot, 0, rm);
4480
                switch(ot | (b & 8)) {
4481
                case OT_BYTE:
4482
                    gen_op_movzbl_T0_T0();
4483
                    break;
4484
                case OT_BYTE | 8:
4485
                    gen_op_movsbl_T0_T0();
4486
                    break;
4487
                case OT_WORD:
4488
                    gen_op_movzwl_T0_T0();
4489
                    break;
4490
                default:
4491
                case OT_WORD | 8:
4492
                    gen_op_movswl_T0_T0();
4493
                    break;
4494
                }
4495
                gen_op_mov_reg_T0(d_ot, reg);
4496
            } else {
4497
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4498
                if (b & 8) {
4499
                    gen_op_lds_T0_A0(ot + s->mem_index);
4500
                } else {
4501
                    gen_op_ldu_T0_A0(ot + s->mem_index);
4502
                }
4503
                gen_op_mov_reg_T0(d_ot, reg);
4504
            }
4505
        }
4506
        break;
4507

    
4508
    case 0x8d: /* lea */
4509
        ot = dflag + OT_WORD;
4510
        modrm = ldub_code(s->pc++);
4511
        mod = (modrm >> 6) & 3;
4512
        if (mod == 3)
4513
            goto illegal_op;
4514
        reg = ((modrm >> 3) & 7) | rex_r;
4515
        /* we must ensure that no segment is added */
4516
        s->override = -1;
4517
        val = s->addseg;
4518
        s->addseg = 0;
4519
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4520
        s->addseg = val;
4521
        gen_op_mov_reg_A0(ot - OT_WORD, reg);
4522
        break;
4523

    
4524
    case 0xa0: /* mov EAX, Ov */
4525
    case 0xa1:
4526
    case 0xa2: /* mov Ov, EAX */
4527
    case 0xa3:
4528
        {
4529
            target_ulong offset_addr;
4530

    
4531
            if ((b & 1) == 0)
4532
                ot = OT_BYTE;
4533
            else
4534
                ot = dflag + OT_WORD;
4535
#ifdef TARGET_X86_64
4536
            if (s->aflag == 2) {
4537
                offset_addr = ldq_code(s->pc);
4538
                s->pc += 8;
4539
                gen_op_movq_A0_im(offset_addr);
4540
            } else
4541
#endif
4542
            {
4543
                if (s->aflag) {
4544
                    offset_addr = insn_get(s, OT_LONG);
4545
                } else {
4546
                    offset_addr = insn_get(s, OT_WORD);
4547
                }
4548
                gen_op_movl_A0_im(offset_addr);
4549
            }
4550
            gen_add_A0_ds_seg(s);
4551
            if ((b & 2) == 0) {
4552
                gen_op_ld_T0_A0(ot + s->mem_index);
4553
                gen_op_mov_reg_T0(ot, R_EAX);
4554
            } else {
4555
                gen_op_mov_TN_reg(ot, 0, R_EAX);
4556
                gen_op_st_T0_A0(ot + s->mem_index);
4557
            }
4558
        }
4559
        break;
4560
    case 0xd7: /* xlat */
4561
#ifdef TARGET_X86_64
4562
        if (s->aflag == 2) {
4563
            gen_op_movq_A0_reg(R_EBX);
4564
            gen_op_addq_A0_AL();
4565
        } else
4566
#endif
4567
        {
4568
            gen_op_movl_A0_reg(R_EBX);
4569
            gen_op_addl_A0_AL();
4570
            if (s->aflag == 0)
4571
                gen_op_andl_A0_ffff();
4572
        }
4573
        gen_add_A0_ds_seg(s);
4574
        gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4575
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4576
        break;
4577
    case 0xb0 ... 0xb7: /* mov R, Ib */
4578
        val = insn_get(s, OT_BYTE);
4579
        gen_op_movl_T0_im(val);
4580
        gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4581
        break;
4582
    case 0xb8 ... 0xbf: /* mov R, Iv */
4583
#ifdef TARGET_X86_64
4584
        if (dflag == 2) {
4585
            uint64_t tmp;
4586
            /* 64 bit case */
4587
            tmp = ldq_code(s->pc);
4588
            s->pc += 8;
4589
            reg = (b & 7) | REX_B(s);
4590
            gen_movtl_T0_im(tmp);
4591
            gen_op_mov_reg_T0(OT_QUAD, reg);
4592
        } else
4593
#endif
4594
        {
4595
            ot = dflag ? OT_LONG : OT_WORD;
4596
            val = insn_get(s, ot);
4597
            reg = (b & 7) | REX_B(s);
4598
            gen_op_movl_T0_im(val);
4599
            gen_op_mov_reg_T0(ot, reg);
4600
        }
4601
        break;
4602

    
4603
    case 0x91 ... 0x97: /* xchg R, EAX */
4604
        ot = dflag + OT_WORD;
4605
        reg = (b & 7) | REX_B(s);
4606
        rm = R_EAX;
4607
        goto do_xchg_reg;
4608
    case 0x86:
4609
    case 0x87: /* xchg Ev, Gv */
4610
        if ((b & 1) == 0)
4611
            ot = OT_BYTE;
4612
        else
4613
            ot = dflag + OT_WORD;
4614
        modrm = ldub_code(s->pc++);
4615
        reg = ((modrm >> 3) & 7) | rex_r;
4616
        mod = (modrm >> 6) & 3;
4617
        if (mod == 3) {
4618
            rm = (modrm & 7) | REX_B(s);
4619
        do_xchg_reg:
4620
            gen_op_mov_TN_reg(ot, 0, reg);
4621
            gen_op_mov_TN_reg(ot, 1, rm);
4622
            gen_op_mov_reg_T0(ot, rm);
4623
            gen_op_mov_reg_T1(ot, reg);
4624
        } else {
4625
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4626
            gen_op_mov_TN_reg(ot, 0, reg);
4627
            /* for xchg, lock is implicit */
4628
            if (!(prefixes & PREFIX_LOCK))
4629
                tcg_gen_helper_0_0(helper_lock);
4630
            gen_op_ld_T1_A0(ot + s->mem_index);
4631
            gen_op_st_T0_A0(ot + s->mem_index);
4632
            if (!(prefixes & PREFIX_LOCK))
4633
                tcg_gen_helper_0_0(helper_unlock);
4634
            gen_op_mov_reg_T1(ot, reg);
4635
        }
4636
        break;
4637
    case 0xc4: /* les Gv */
4638
        if (CODE64(s))
4639
            goto illegal_op;
4640
        op = R_ES;
4641
        goto do_lxx;
4642
    case 0xc5: /* lds Gv */
4643
        if (CODE64(s))
4644
            goto illegal_op;
4645
        op = R_DS;
4646
        goto do_lxx;
4647
    case 0x1b2: /* lss Gv */
4648
        op = R_SS;
4649
        goto do_lxx;
4650
    case 0x1b4: /* lfs Gv */
4651
        op = R_FS;
4652
        goto do_lxx;
4653
    case 0x1b5: /* lgs Gv */
4654
        op = R_GS;
4655
    do_lxx:
4656
        ot = dflag ? OT_LONG : OT_WORD;
4657
        modrm = ldub_code(s->pc++);
4658
        reg = ((modrm >> 3) & 7) | rex_r;
4659
        mod = (modrm >> 6) & 3;
4660
        if (mod == 3)
4661
            goto illegal_op;
4662
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4663
        gen_op_ld_T1_A0(ot + s->mem_index);
4664
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4665
        /* load the segment first to handle exceptions properly */
4666
        gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4667
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4668
        /* then put the data */
4669
        gen_op_mov_reg_T1(ot, reg);
4670
        if (s->is_jmp) {
4671
            gen_jmp_im(s->pc - s->cs_base);
4672
            gen_eob(s);
4673
        }
4674
        break;
4675

    
4676
        /************************/
4677
        /* shifts */
4678
    case 0xc0:
4679
    case 0xc1:
4680
        /* shift Ev,Ib */
4681
        shift = 2;
4682
    grp2:
4683
        {
4684
            if ((b & 1) == 0)
4685
                ot = OT_BYTE;
4686
            else
4687
                ot = dflag + OT_WORD;
4688

    
4689
            modrm = ldub_code(s->pc++);
4690
            mod = (modrm >> 6) & 3;
4691
            op = (modrm >> 3) & 7;
4692

    
4693
            if (mod != 3) {
4694
                if (shift == 2) {
4695
                    s->rip_offset = 1;
4696
                }
4697
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4698
                opreg = OR_TMP0;
4699
            } else {
4700
                opreg = (modrm & 7) | REX_B(s);
4701
            }
4702

    
4703
            /* simpler op */
4704
            if (shift == 0) {
4705
                gen_shift(s, op, ot, opreg, OR_ECX);
4706
            } else {
4707
                if (shift == 2) {
4708
                    shift = ldub_code(s->pc++);
4709
                }
4710
                gen_shifti(s, op, ot, opreg, shift);
4711
            }
4712
        }
4713
        break;
4714
    case 0xd0:
4715
    case 0xd1:
4716
        /* shift Ev,1 */
4717
        shift = 1;
4718
        goto grp2;
4719
    case 0xd2:
4720
    case 0xd3:
4721
        /* shift Ev,cl */
4722
        shift = 0;
4723
        goto grp2;
4724

    
4725
    case 0x1a4: /* shld imm */
4726
        op = 0;
4727
        shift = 1;
4728
        goto do_shiftd;
4729
    case 0x1a5: /* shld cl */
4730
        op = 0;
4731
        shift = 0;
4732
        goto do_shiftd;
4733
    case 0x1ac: /* shrd imm */
4734
        op = 1;
4735
        shift = 1;
4736
        goto do_shiftd;
4737
    case 0x1ad: /* shrd cl */
4738
        op = 1;
4739
        shift = 0;
4740
    do_shiftd:
4741
        ot = dflag + OT_WORD;
4742
        modrm = ldub_code(s->pc++);
4743
        mod = (modrm >> 6) & 3;
4744
        rm = (modrm & 7) | REX_B(s);
4745
        reg = ((modrm >> 3) & 7) | rex_r;
4746
        if (mod != 3) {
4747
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4748
            opreg = OR_TMP0;
4749
        } else {
4750
            opreg = rm;
4751
        }
4752
        gen_op_mov_TN_reg(ot, 1, reg);
4753

    
4754
        if (shift) {
4755
            val = ldub_code(s->pc++);
4756
            tcg_gen_movi_tl(cpu_T3, val);
4757
        } else {
4758
            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
4759
        }
4760
        gen_shiftd_rm_T1_T3(s, ot, opreg, op);
4761
        break;
4762

    
4763
        /************************/
4764
        /* floats */
4765
    case 0xd8 ... 0xdf:
4766
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4767
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4768
            /* XXX: what to do if illegal op ? */
4769
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4770
            break;
4771
        }
4772
        modrm = ldub_code(s->pc++);
4773
        mod = (modrm >> 6) & 3;
4774
        rm = modrm & 7;
4775
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4776
        if (mod != 3) {
4777
            /* memory op */
4778
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4779
            switch(op) {
4780
            case 0x00 ... 0x07: /* fxxxs */
4781
            case 0x10 ... 0x17: /* fixxxl */
4782
            case 0x20 ... 0x27: /* fxxxl */
4783
            case 0x30 ... 0x37: /* fixxx */
4784
                {
4785
                    int op1;
4786
                    op1 = op & 7;
4787

    
4788
                    switch(op >> 4) {
4789
                    case 0:
4790
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4791
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4792
                        tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
4793
                        break;
4794
                    case 1:
4795
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4796
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4797
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4798
                        break;
4799
                    case 2:
4800
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
4801
                                          (s->mem_index >> 2) - 1);
4802
                        tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
4803
                        break;
4804
                    case 3:
4805
                    default:
4806
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4807
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4808
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4809
                        break;
4810
                    }
4811

    
4812
                    tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
4813
                    if (op1 == 3) {
4814
                        /* fcomp needs pop */
4815
                        tcg_gen_helper_0_0(helper_fpop);
4816
                    }
4817
                }
4818
                break;
4819
            case 0x08: /* flds */
4820
            case 0x0a: /* fsts */
4821
            case 0x0b: /* fstps */
4822
            case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4823
            case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4824
            case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4825
                switch(op & 7) {
4826
                case 0:
4827
                    switch(op >> 4) {
4828
                    case 0:
4829
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4830
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4831
                        tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
4832
                        break;
4833
                    case 1:
4834
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4835
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4836
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
4837
                        break;
4838
                    case 2:
4839
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
4840
                                          (s->mem_index >> 2) - 1);
4841
                        tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
4842
                        break;
4843
                    case 3:
4844
                    default:
4845
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4846
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4847
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
4848
                        break;
4849
                    }
4850
                    break;
4851
                case 1:
4852
                    /* XXX: the corresponding CPUID bit must be tested ! */
4853
                    switch(op >> 4) {
4854
                    case 1:
4855
                        tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
4856
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4857
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4858
                        break;
4859
                    case 2:
4860
                        tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
4861
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
4862
                                          (s->mem_index >> 2) - 1);
4863
                        break;
4864
                    case 3:
4865
                    default:
4866
                        tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
4867
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4868
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
4869
                        break;
4870
                    }
4871
                    tcg_gen_helper_0_0(helper_fpop);
4872
                    break;
4873
                default:
4874
                    switch(op >> 4) {
4875
                    case 0:
4876
                        tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
4877
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4878
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4879
                        break;
4880
                    case 1:
4881
                        tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
4882
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4883
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4884
                        break;
4885
                    case 2:
4886
                        tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
4887
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
4888
                                          (s->mem_index >> 2) - 1);
4889
                        break;
4890
                    case 3:
4891
                    default:
4892
                        tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
4893
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4894
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
4895
                        break;
4896
                    }
4897
                    if ((op & 7) == 3)
4898
                        tcg_gen_helper_0_0(helper_fpop);
4899
                    break;
4900
                }
4901
                break;
4902
            case 0x0c: /* fldenv mem */
4903
                if (s->cc_op != CC_OP_DYNAMIC)
4904
                    gen_op_set_cc_op(s->cc_op);
4905
                gen_jmp_im(pc_start - s->cs_base);
4906
                tcg_gen_helper_0_2(helper_fldenv, 
4907
                                   cpu_A0, tcg_const_i32(s->dflag));
4908
                break;
4909
            case 0x0d: /* fldcw mem */
4910
                gen_op_ld_T0_A0(OT_WORD + s->mem_index);
4911
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4912
                tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
4913
                break;
4914
            case 0x0e: /* fnstenv mem */
4915
                if (s->cc_op != CC_OP_DYNAMIC)
4916
                    gen_op_set_cc_op(s->cc_op);
4917
                gen_jmp_im(pc_start - s->cs_base);
4918
                tcg_gen_helper_0_2(helper_fstenv,
4919
                                   cpu_A0, tcg_const_i32(s->dflag));
4920
                break;
4921
            case 0x0f: /* fnstcw mem */
4922
                tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
4923
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4924
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
4925
                break;
4926
            case 0x1d: /* fldt mem */
4927
                if (s->cc_op != CC_OP_DYNAMIC)
4928
                    gen_op_set_cc_op(s->cc_op);
4929
                gen_jmp_im(pc_start - s->cs_base);
4930
                tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
4931
                break;
4932
            case 0x1f: /* fstpt mem */
4933
                if (s->cc_op != CC_OP_DYNAMIC)
4934
                    gen_op_set_cc_op(s->cc_op);
4935
                gen_jmp_im(pc_start - s->cs_base);
4936
                tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
4937
                tcg_gen_helper_0_0(helper_fpop);
4938
                break;
4939
            case 0x2c: /* frstor mem */
4940
                if (s->cc_op != CC_OP_DYNAMIC)
4941
                    gen_op_set_cc_op(s->cc_op);
4942
                gen_jmp_im(pc_start - s->cs_base);
4943
                tcg_gen_helper_0_2(helper_frstor,
4944
                                   cpu_A0, tcg_const_i32(s->dflag));
4945
                break;
4946
            case 0x2e: /* fnsave mem */
4947
                if (s->cc_op != CC_OP_DYNAMIC)
4948
                    gen_op_set_cc_op(s->cc_op);
4949
                gen_jmp_im(pc_start - s->cs_base);
4950
                tcg_gen_helper_0_2(helper_fsave,
4951
                                   cpu_A0, tcg_const_i32(s->dflag));
4952
                break;
4953
            case 0x2f: /* fnstsw mem */
4954
                tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
4955
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4956
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
4957
                break;
4958
            case 0x3c: /* fbld */
4959
                if (s->cc_op != CC_OP_DYNAMIC)
4960
                    gen_op_set_cc_op(s->cc_op);
4961
                gen_jmp_im(pc_start - s->cs_base);
4962
                tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
4963
                break;
4964
            case 0x3e: /* fbstp */
4965
                if (s->cc_op != CC_OP_DYNAMIC)
4966
                    gen_op_set_cc_op(s->cc_op);
4967
                gen_jmp_im(pc_start - s->cs_base);
4968
                tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
4969
                tcg_gen_helper_0_0(helper_fpop);
4970
                break;
4971
            case 0x3d: /* fildll */
4972
                tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
4973
                                  (s->mem_index >> 2) - 1);
4974
                tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
4975
                break;
4976
            case 0x3f: /* fistpll */
4977
                tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
4978
                tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
4979
                                  (s->mem_index >> 2) - 1);
4980
                tcg_gen_helper_0_0(helper_fpop);
4981
                break;
4982
            default:
4983
                goto illegal_op;
4984
            }
4985
        } else {
4986
            /* register float ops */
4987
            opreg = rm;
4988

    
4989
            switch(op) {
4990
            case 0x08: /* fld sti */
4991
                tcg_gen_helper_0_0(helper_fpush);
4992
                tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
4993
                break;
4994
            case 0x09: /* fxchg sti */
4995
            case 0x29: /* fxchg4 sti, undocumented op */
4996
            case 0x39: /* fxchg7 sti, undocumented op */
4997
                tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
4998
                break;
4999
            case 0x0a: /* grp d9/2 */
5000
                switch(rm) {
5001
                case 0: /* fnop */
5002
                    /* check exceptions (FreeBSD FPU probe) */
5003
                    if (s->cc_op != CC_OP_DYNAMIC)
5004
                        gen_op_set_cc_op(s->cc_op);
5005
                    gen_jmp_im(pc_start - s->cs_base);
5006
                    tcg_gen_helper_0_0(helper_fwait);
5007
                    break;
5008
                default:
5009
                    goto illegal_op;
5010
                }
5011
                break;
5012
            case 0x0c: /* grp d9/4 */
5013
                switch(rm) {
5014
                case 0: /* fchs */
5015
                    tcg_gen_helper_0_0(helper_fchs_ST0);
5016
                    break;
5017
                case 1: /* fabs */
5018
                    tcg_gen_helper_0_0(helper_fabs_ST0);
5019
                    break;
5020
                case 4: /* ftst */
5021
                    tcg_gen_helper_0_0(helper_fldz_FT0);
5022
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5023
                    break;
5024
                case 5: /* fxam */
5025
                    tcg_gen_helper_0_0(helper_fxam_ST0);
5026
                    break;
5027
                default:
5028
                    goto illegal_op;
5029
                }
5030
                break;
5031
            case 0x0d: /* grp d9/5 */
5032
                {
5033
                    switch(rm) {
5034
                    case 0:
5035
                        tcg_gen_helper_0_0(helper_fpush);
5036
                        tcg_gen_helper_0_0(helper_fld1_ST0);
5037
                        break;
5038
                    case 1:
5039
                        tcg_gen_helper_0_0(helper_fpush);
5040
                        tcg_gen_helper_0_0(helper_fldl2t_ST0);
5041
                        break;
5042
                    case 2:
5043
                        tcg_gen_helper_0_0(helper_fpush);
5044
                        tcg_gen_helper_0_0(helper_fldl2e_ST0);
5045
                        break;
5046
                    case 3:
5047
                        tcg_gen_helper_0_0(helper_fpush);
5048
                        tcg_gen_helper_0_0(helper_fldpi_ST0);
5049
                        break;
5050
                    case 4:
5051
                        tcg_gen_helper_0_0(helper_fpush);
5052
                        tcg_gen_helper_0_0(helper_fldlg2_ST0);
5053
                        break;
5054
                    case 5:
5055
                        tcg_gen_helper_0_0(helper_fpush);
5056
                        tcg_gen_helper_0_0(helper_fldln2_ST0);
5057
                        break;
5058
                    case 6:
5059
                        tcg_gen_helper_0_0(helper_fpush);
5060
                        tcg_gen_helper_0_0(helper_fldz_ST0);
5061
                        break;
5062
                    default:
5063
                        goto illegal_op;
5064
                    }
5065
                }
5066
                break;
5067
            case 0x0e: /* grp d9/6 */
5068
                switch(rm) {
5069
                case 0: /* f2xm1 */
5070
                    tcg_gen_helper_0_0(helper_f2xm1);
5071
                    break;
5072
                case 1: /* fyl2x */
5073
                    tcg_gen_helper_0_0(helper_fyl2x);
5074
                    break;
5075
                case 2: /* fptan */
5076
                    tcg_gen_helper_0_0(helper_fptan);
5077
                    break;
5078
                case 3: /* fpatan */
5079
                    tcg_gen_helper_0_0(helper_fpatan);
5080
                    break;
5081
                case 4: /* fxtract */
5082
                    tcg_gen_helper_0_0(helper_fxtract);
5083
                    break;
5084
                case 5: /* fprem1 */
5085
                    tcg_gen_helper_0_0(helper_fprem1);
5086
                    break;
5087
                case 6: /* fdecstp */
5088
                    tcg_gen_helper_0_0(helper_fdecstp);
5089
                    break;
5090
                default:
5091
                case 7: /* fincstp */
5092
                    tcg_gen_helper_0_0(helper_fincstp);
5093
                    break;
5094
                }
5095
                break;
5096
            case 0x0f: /* grp d9/7 */
5097
                switch(rm) {
5098
                case 0: /* fprem */
5099
                    tcg_gen_helper_0_0(helper_fprem);
5100
                    break;
5101
                case 1: /* fyl2xp1 */
5102
                    tcg_gen_helper_0_0(helper_fyl2xp1);
5103
                    break;
5104
                case 2: /* fsqrt */
5105
                    tcg_gen_helper_0_0(helper_fsqrt);
5106
                    break;
5107
                case 3: /* fsincos */
5108
                    tcg_gen_helper_0_0(helper_fsincos);
5109
                    break;
5110
                case 5: /* fscale */
5111
                    tcg_gen_helper_0_0(helper_fscale);
5112
                    break;
5113
                case 4: /* frndint */
5114
                    tcg_gen_helper_0_0(helper_frndint);
5115
                    break;
5116
                case 6: /* fsin */
5117
                    tcg_gen_helper_0_0(helper_fsin);
5118
                    break;
5119
                default:
5120
                case 7: /* fcos */
5121
                    tcg_gen_helper_0_0(helper_fcos);
5122
                    break;
5123
                }
5124
                break;
5125
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
5126
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
5127
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
5128
                {
5129
                    int op1;
5130

    
5131
                    op1 = op & 7;
5132
                    if (op >= 0x20) {
5133
                        tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
5134
                        if (op >= 0x30)
5135
                            tcg_gen_helper_0_0(helper_fpop);
5136
                    } else {
5137
                        tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5138
                        tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
5139
                    }
5140
                }
5141
                break;
5142
            case 0x02: /* fcom */
5143
            case 0x22: /* fcom2, undocumented op */
5144
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5145
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5146
                break;
5147
            case 0x03: /* fcomp */
5148
            case 0x23: /* fcomp3, undocumented op */
5149
            case 0x32: /* fcomp5, undocumented op */
5150
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5151
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5152
                tcg_gen_helper_0_0(helper_fpop);
5153
                break;
5154
            case 0x15: /* da/5 */
5155
                switch(rm) {
5156
                case 1: /* fucompp */
5157
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5158
                    tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5159
                    tcg_gen_helper_0_0(helper_fpop);
5160
                    tcg_gen_helper_0_0(helper_fpop);
5161
                    break;
5162
                default:
5163
                    goto illegal_op;
5164
                }
5165
                break;
5166
            case 0x1c:
5167
                switch(rm) {
5168
                case 0: /* feni (287 only, just do nop here) */
5169
                    break;
5170
                case 1: /* fdisi (287 only, just do nop here) */
5171
                    break;
5172
                case 2: /* fclex */
5173
                    tcg_gen_helper_0_0(helper_fclex);
5174
                    break;
5175
                case 3: /* fninit */
5176
                    tcg_gen_helper_0_0(helper_fninit);
5177
                    break;
5178
                case 4: /* fsetpm (287 only, just do nop here) */
5179
                    break;
5180
                default:
5181
                    goto illegal_op;
5182
                }
5183
                break;
5184
            case 0x1d: /* fucomi */
5185
                if (s->cc_op != CC_OP_DYNAMIC)
5186
                    gen_op_set_cc_op(s->cc_op);
5187
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5188
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5189
                gen_op_fcomi_dummy();
5190
                s->cc_op = CC_OP_EFLAGS;
5191
                break;
5192
            case 0x1e: /* fcomi */
5193
                if (s->cc_op != CC_OP_DYNAMIC)
5194
                    gen_op_set_cc_op(s->cc_op);
5195
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5196
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5197
                gen_op_fcomi_dummy();
5198
                s->cc_op = CC_OP_EFLAGS;
5199
                break;
5200
            case 0x28: /* ffree sti */
5201
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5202
                break;
5203
            case 0x2a: /* fst sti */
5204
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5205
                break;
5206
            case 0x2b: /* fstp sti */
5207
            case 0x0b: /* fstp1 sti, undocumented op */
5208
            case 0x3a: /* fstp8 sti, undocumented op */
5209
            case 0x3b: /* fstp9 sti, undocumented op */
5210
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5211
                tcg_gen_helper_0_0(helper_fpop);
5212
                break;
5213
            case 0x2c: /* fucom st(i) */
5214
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5215
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5216
                break;
5217
            case 0x2d: /* fucomp st(i) */
5218
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5219
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5220
                tcg_gen_helper_0_0(helper_fpop);
5221
                break;
5222
            case 0x33: /* de/3 */
5223
                switch(rm) {
5224
                case 1: /* fcompp */
5225
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5226
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5227
                    tcg_gen_helper_0_0(helper_fpop);
5228
                    tcg_gen_helper_0_0(helper_fpop);
5229
                    break;
5230
                default:
5231
                    goto illegal_op;
5232
                }
5233
                break;
5234
            case 0x38: /* ffreep sti, undocumented op */
5235
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5236
                tcg_gen_helper_0_0(helper_fpop);
5237
                break;
5238
            case 0x3c: /* df/4 */
5239
                switch(rm) {
5240
                case 0:
5241
                    tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5242
                    tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5243
                    gen_op_mov_reg_T0(OT_WORD, R_EAX);
5244
                    break;
5245
                default:
5246
                    goto illegal_op;
5247
                }
5248
                break;
5249
            case 0x3d: /* fucomip */
5250
                if (s->cc_op != CC_OP_DYNAMIC)
5251
                    gen_op_set_cc_op(s->cc_op);
5252
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5253
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5254
                tcg_gen_helper_0_0(helper_fpop);
5255
                gen_op_fcomi_dummy();
5256
                s->cc_op = CC_OP_EFLAGS;
5257
                break;
5258
            case 0x3e: /* fcomip */
5259
                if (s->cc_op != CC_OP_DYNAMIC)
5260
                    gen_op_set_cc_op(s->cc_op);
5261
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5262
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5263
                tcg_gen_helper_0_0(helper_fpop);
5264
                gen_op_fcomi_dummy();
5265
                s->cc_op = CC_OP_EFLAGS;
5266
                break;
5267
            case 0x10 ... 0x13: /* fcmovxx */
5268
            case 0x18 ... 0x1b:
5269
                {
5270
                    int op1, l1;
5271
                    const static uint8_t fcmov_cc[8] = {
5272
                        (JCC_B << 1),
5273
                        (JCC_Z << 1),
5274
                        (JCC_BE << 1),
5275
                        (JCC_P << 1),
5276
                    };
5277
                    op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
5278
                    gen_setcc(s, op1);
5279
                    l1 = gen_new_label();
5280
                    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), l1);
5281
                    tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
5282
                    gen_set_label(l1);
5283
                }
5284
                break;
5285
            default:
5286
                goto illegal_op;
5287
            }
5288
        }
5289
        break;
5290
        /************************/
5291
        /* string ops */
5292

    
5293
    case 0xa4: /* movsS */
5294
    case 0xa5:
5295
        if ((b & 1) == 0)
5296
            ot = OT_BYTE;
5297
        else
5298
            ot = dflag + OT_WORD;
5299

    
5300
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5301
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5302
        } else {
5303
            gen_movs(s, ot);
5304
        }
5305
        break;
5306

    
5307
    case 0xaa: /* stosS */
5308
    case 0xab:
5309
        if ((b & 1) == 0)
5310
            ot = OT_BYTE;
5311
        else
5312
            ot = dflag + OT_WORD;
5313

    
5314
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5315
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5316
        } else {
5317
            gen_stos(s, ot);
5318
        }
5319
        break;
5320
    case 0xac: /* lodsS */
5321
    case 0xad:
5322
        if ((b & 1) == 0)
5323
            ot = OT_BYTE;
5324
        else
5325
            ot = dflag + OT_WORD;
5326
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5327
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5328
        } else {
5329
            gen_lods(s, ot);
5330
        }
5331
        break;
5332
    case 0xae: /* scasS */
5333
    case 0xaf:
5334
        if ((b & 1) == 0)
5335
            ot = OT_BYTE;
5336
        else
5337
            ot = dflag + OT_WORD;
5338
        if (prefixes & PREFIX_REPNZ) {
5339
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5340
        } else if (prefixes & PREFIX_REPZ) {
5341
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5342
        } else {
5343
            gen_scas(s, ot);
5344
            s->cc_op = CC_OP_SUBB + ot;
5345
        }
5346
        break;
5347

    
5348
    case 0xa6: /* cmpsS */
5349
    case 0xa7:
5350
        if ((b & 1) == 0)
5351
            ot = OT_BYTE;
5352
        else
5353
            ot = dflag + OT_WORD;
5354
        if (prefixes & PREFIX_REPNZ) {
5355
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5356
        } else if (prefixes & PREFIX_REPZ) {
5357
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5358
        } else {
5359
            gen_cmps(s, ot);
5360
            s->cc_op = CC_OP_SUBB + ot;
5361
        }
5362
        break;
5363
    case 0x6c: /* insS */
5364
    case 0x6d:
5365
        if ((b & 1) == 0)
5366
            ot = OT_BYTE;
5367
        else
5368
            ot = dflag ? OT_LONG : OT_WORD;
5369
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5370
        gen_op_andl_T0_ffff();
5371
        gen_check_io(s, ot, pc_start - s->cs_base, 
5372
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
5373
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5374
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5375
        } else {
5376
            gen_ins(s, ot);
5377
        }
5378
        break;
5379
    case 0x6e: /* outsS */
5380
    case 0x6f:
5381
        if ((b & 1) == 0)
5382
            ot = OT_BYTE;
5383
        else
5384
            ot = dflag ? OT_LONG : OT_WORD;
5385
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5386
        gen_op_andl_T0_ffff();
5387
        gen_check_io(s, ot, pc_start - s->cs_base,
5388
                     svm_is_rep(prefixes) | 4);
5389
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5390
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5391
        } else {
5392
            gen_outs(s, ot);
5393
        }
5394
        break;
5395

    
5396
        /************************/
5397
        /* port I/O */
5398

    
5399
    case 0xe4:
5400
    case 0xe5:
5401
        if ((b & 1) == 0)
5402
            ot = OT_BYTE;
5403
        else
5404
            ot = dflag ? OT_LONG : OT_WORD;
5405
        val = ldub_code(s->pc++);
5406
        gen_op_movl_T0_im(val);
5407
        gen_check_io(s, ot, pc_start - s->cs_base,
5408
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5409
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5410
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5411
        gen_op_mov_reg_T1(ot, R_EAX);
5412
        break;
5413
    case 0xe6:
5414
    case 0xe7:
5415
        if ((b & 1) == 0)
5416
            ot = OT_BYTE;
5417
        else
5418
            ot = dflag ? OT_LONG : OT_WORD;
5419
        val = ldub_code(s->pc++);
5420
        gen_op_movl_T0_im(val);
5421
        gen_check_io(s, ot, pc_start - s->cs_base,
5422
                     svm_is_rep(prefixes));
5423
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5424

    
5425
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5426
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5427
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5428
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5429
        break;
5430
    case 0xec:
5431
    case 0xed:
5432
        if ((b & 1) == 0)
5433
            ot = OT_BYTE;
5434
        else
5435
            ot = dflag ? OT_LONG : OT_WORD;
5436
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5437
        gen_op_andl_T0_ffff();
5438
        gen_check_io(s, ot, pc_start - s->cs_base,
5439
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5440
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5441
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5442
        gen_op_mov_reg_T1(ot, R_EAX);
5443
        break;
5444
    case 0xee:
5445
    case 0xef:
5446
        if ((b & 1) == 0)
5447
            ot = OT_BYTE;
5448
        else
5449
            ot = dflag ? OT_LONG : OT_WORD;
5450
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5451
        gen_op_andl_T0_ffff();
5452
        gen_check_io(s, ot, pc_start - s->cs_base,
5453
                     svm_is_rep(prefixes));
5454
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5455

    
5456
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5457
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5458
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5459
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5460
        break;
5461

    
5462
        /************************/
5463
        /* control */
5464
    case 0xc2: /* ret im */
5465
        val = ldsw_code(s->pc);
5466
        s->pc += 2;
5467
        gen_pop_T0(s);
5468
        if (CODE64(s) && s->dflag)
5469
            s->dflag = 2;
5470
        gen_stack_update(s, val + (2 << s->dflag));
5471
        if (s->dflag == 0)
5472
            gen_op_andl_T0_ffff();
5473
        gen_op_jmp_T0();
5474
        gen_eob(s);
5475
        break;
5476
    case 0xc3: /* ret */
5477
        gen_pop_T0(s);
5478
        gen_pop_update(s);
5479
        if (s->dflag == 0)
5480
            gen_op_andl_T0_ffff();
5481
        gen_op_jmp_T0();
5482
        gen_eob(s);
5483
        break;
5484
    case 0xca: /* lret im */
5485
        val = ldsw_code(s->pc);
5486
        s->pc += 2;
5487
    do_lret:
5488
        if (s->pe && !s->vm86) {
5489
            if (s->cc_op != CC_OP_DYNAMIC)
5490
                gen_op_set_cc_op(s->cc_op);
5491
            gen_jmp_im(pc_start - s->cs_base);
5492
            tcg_gen_helper_0_2(helper_lret_protected,
5493
                               tcg_const_i32(s->dflag), 
5494
                               tcg_const_i32(val));
5495
        } else {
5496
            gen_stack_A0(s);
5497
            /* pop offset */
5498
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5499
            if (s->dflag == 0)
5500
                gen_op_andl_T0_ffff();
5501
            /* NOTE: keeping EIP updated is not a problem in case of
5502
               exception */
5503
            gen_op_jmp_T0();
5504
            /* pop selector */
5505
            gen_op_addl_A0_im(2 << s->dflag);
5506
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5507
            gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5508
            /* add stack offset */
5509
            gen_stack_update(s, val + (4 << s->dflag));
5510
        }
5511
        gen_eob(s);
5512
        break;
5513
    case 0xcb: /* lret */
5514
        val = 0;
5515
        goto do_lret;
5516
    case 0xcf: /* iret */
5517
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5518
            break;
5519
        if (!s->pe) {
5520
            /* real mode */
5521
            tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5522
            s->cc_op = CC_OP_EFLAGS;
5523
        } else if (s->vm86) {
5524
            if (s->iopl != 3) {
5525
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5526
            } else {
5527
                tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5528
                s->cc_op = CC_OP_EFLAGS;
5529
            }
5530
        } else {
5531
            if (s->cc_op != CC_OP_DYNAMIC)
5532
                gen_op_set_cc_op(s->cc_op);
5533
            gen_jmp_im(pc_start - s->cs_base);
5534
            tcg_gen_helper_0_2(helper_iret_protected,
5535
                               tcg_const_i32(s->dflag), 
5536
                               tcg_const_i32(s->pc - s->cs_base));
5537
            s->cc_op = CC_OP_EFLAGS;
5538
        }
5539
        gen_eob(s);
5540
        break;
5541
    case 0xe8: /* call im */
5542
        {
5543
            if (dflag)
5544
                tval = (int32_t)insn_get(s, OT_LONG);
5545
            else
5546
                tval = (int16_t)insn_get(s, OT_WORD);
5547
            next_eip = s->pc - s->cs_base;
5548
            tval += next_eip;
5549
            if (s->dflag == 0)
5550
                tval &= 0xffff;
5551
            gen_movtl_T0_im(next_eip);
5552
            gen_push_T0(s);
5553
            gen_jmp(s, tval);
5554
        }
5555
        break;
5556
    case 0x9a: /* lcall im */
5557
        {
5558
            unsigned int selector, offset;
5559

    
5560
            if (CODE64(s))
5561
                goto illegal_op;
5562
            ot = dflag ? OT_LONG : OT_WORD;
5563
            offset = insn_get(s, ot);
5564
            selector = insn_get(s, OT_WORD);
5565

    
5566
            gen_op_movl_T0_im(selector);
5567
            gen_op_movl_T1_imu(offset);
5568
        }
5569
        goto do_lcall;
5570
    case 0xe9: /* jmp im */
5571
        if (dflag)
5572
            tval = (int32_t)insn_get(s, OT_LONG);
5573
        else
5574
            tval = (int16_t)insn_get(s, OT_WORD);
5575
        tval += s->pc - s->cs_base;
5576
        if (s->dflag == 0)
5577
            tval &= 0xffff;
5578
        gen_jmp(s, tval);
5579
        break;
5580
    case 0xea: /* ljmp im */
5581
        {
5582
            unsigned int selector, offset;
5583

    
5584
            if (CODE64(s))
5585
                goto illegal_op;
5586
            ot = dflag ? OT_LONG : OT_WORD;
5587
            offset = insn_get(s, ot);
5588
            selector = insn_get(s, OT_WORD);
5589

    
5590
            gen_op_movl_T0_im(selector);
5591
            gen_op_movl_T1_imu(offset);
5592
        }
5593
        goto do_ljmp;
5594
    case 0xeb: /* jmp Jb */
5595
        tval = (int8_t)insn_get(s, OT_BYTE);
5596
        tval += s->pc - s->cs_base;
5597
        if (s->dflag == 0)
5598
            tval &= 0xffff;
5599
        gen_jmp(s, tval);
5600
        break;
5601
    case 0x70 ... 0x7f: /* jcc Jb */
5602
        tval = (int8_t)insn_get(s, OT_BYTE);
5603
        goto do_jcc;
5604
    case 0x180 ... 0x18f: /* jcc Jv */
5605
        if (dflag) {
5606
            tval = (int32_t)insn_get(s, OT_LONG);
5607
        } else {
5608
            tval = (int16_t)insn_get(s, OT_WORD);
5609
        }
5610
    do_jcc:
5611
        next_eip = s->pc - s->cs_base;
5612
        tval += next_eip;
5613
        if (s->dflag == 0)
5614
            tval &= 0xffff;
5615
        gen_jcc(s, b, tval, next_eip);
5616
        break;
5617

    
5618
    case 0x190 ... 0x19f: /* setcc Gv */
5619
        modrm = ldub_code(s->pc++);
5620
        gen_setcc(s, b);
5621
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5622
        break;
5623
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
5624
        ot = dflag + OT_WORD;
5625
        modrm = ldub_code(s->pc++);
5626
        reg = ((modrm >> 3) & 7) | rex_r;
5627
        mod = (modrm >> 6) & 3;
5628
        gen_setcc(s, b);
5629
        if (mod != 3) {
5630
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5631
            gen_op_ld_T1_A0(ot + s->mem_index);
5632
        } else {
5633
            rm = (modrm & 7) | REX_B(s);
5634
            gen_op_mov_TN_reg(ot, 1, rm);
5635
        }
5636
        gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5637
        break;
5638

    
5639
        /************************/
5640
        /* flags */
5641
    case 0x9c: /* pushf */
5642
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5643
            break;
5644
        if (s->vm86 && s->iopl != 3) {
5645
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5646
        } else {
5647
            if (s->cc_op != CC_OP_DYNAMIC)
5648
                gen_op_set_cc_op(s->cc_op);
5649
            gen_op_movl_T0_eflags();
5650
            gen_push_T0(s);
5651
        }
5652
        break;
5653
    case 0x9d: /* popf */
5654
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5655
            break;
5656
        if (s->vm86 && s->iopl != 3) {
5657
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5658
        } else {
5659
            gen_pop_T0(s);
5660
            if (s->cpl == 0) {
5661
                if (s->dflag) {
5662
                    gen_op_movl_eflags_T0_cpl0();
5663
                } else {
5664
                    gen_op_movw_eflags_T0_cpl0();
5665
                }
5666
            } else {
5667
                if (s->cpl <= s->iopl) {
5668
                    if (s->dflag) {
5669
                        gen_op_movl_eflags_T0_io();
5670
                    } else {
5671
                        gen_op_movw_eflags_T0_io();
5672
                    }
5673
                } else {
5674
                    if (s->dflag) {
5675
                        gen_op_movl_eflags_T0();
5676
                    } else {
5677
                        gen_op_movw_eflags_T0();
5678
                    }
5679
                }
5680
            }
5681
            gen_pop_update(s);
5682
            s->cc_op = CC_OP_EFLAGS;
5683
            /* abort translation because TF flag may change */
5684
            gen_jmp_im(s->pc - s->cs_base);
5685
            gen_eob(s);
5686
        }
5687
        break;
5688
    case 0x9e: /* sahf */
5689
        if (CODE64(s))
5690
            goto illegal_op;
5691
        gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5692
        if (s->cc_op != CC_OP_DYNAMIC)
5693
            gen_op_set_cc_op(s->cc_op);
5694
        gen_op_movb_eflags_T0();
5695
        s->cc_op = CC_OP_EFLAGS;
5696
        break;
5697
    case 0x9f: /* lahf */
5698
        if (CODE64(s))
5699
            goto illegal_op;
5700
        if (s->cc_op != CC_OP_DYNAMIC)
5701
            gen_op_set_cc_op(s->cc_op);
5702
        gen_op_movl_T0_eflags();
5703
        gen_op_mov_reg_T0(OT_BYTE, R_AH);
5704
        break;
5705
    case 0xf5: /* cmc */
5706
        if (s->cc_op != CC_OP_DYNAMIC)
5707
            gen_op_set_cc_op(s->cc_op);
5708
        gen_op_cmc();
5709
        s->cc_op = CC_OP_EFLAGS;
5710
        break;
5711
    case 0xf8: /* clc */
5712
        if (s->cc_op != CC_OP_DYNAMIC)
5713
            gen_op_set_cc_op(s->cc_op);
5714
        gen_op_clc();
5715
        s->cc_op = CC_OP_EFLAGS;
5716
        break;
5717
    case 0xf9: /* stc */
5718
        if (s->cc_op != CC_OP_DYNAMIC)
5719
            gen_op_set_cc_op(s->cc_op);
5720
        gen_op_stc();
5721
        s->cc_op = CC_OP_EFLAGS;
5722
        break;
5723
    case 0xfc: /* cld */
5724
        tcg_gen_movi_i32(cpu_tmp2_i32, 1);
5725
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5726
        break;
5727
    case 0xfd: /* std */
5728
        tcg_gen_movi_i32(cpu_tmp2_i32, -1);
5729
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5730
        break;
5731

    
5732
        /************************/
5733
        /* bit operations */
5734
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
5735
        ot = dflag + OT_WORD;
5736
        modrm = ldub_code(s->pc++);
5737
        op = (modrm >> 3) & 7;
5738
        mod = (modrm >> 6) & 3;
5739
        rm = (modrm & 7) | REX_B(s);
5740
        if (mod != 3) {
5741
            s->rip_offset = 1;
5742
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5743
            gen_op_ld_T0_A0(ot + s->mem_index);
5744
        } else {
5745
            gen_op_mov_TN_reg(ot, 0, rm);
5746
        }
5747
        /* load shift */
5748
        val = ldub_code(s->pc++);
5749
        gen_op_movl_T1_im(val);
5750
        if (op < 4)
5751
            goto illegal_op;
5752
        op -= 4;
5753
        goto bt_op;
5754
    case 0x1a3: /* bt Gv, Ev */
5755
        op = 0;
5756
        goto do_btx;
5757
    case 0x1ab: /* bts */
5758
        op = 1;
5759
        goto do_btx;
5760
    case 0x1b3: /* btr */
5761
        op = 2;
5762
        goto do_btx;
5763
    case 0x1bb: /* btc */
5764
        op = 3;
5765
    do_btx:
5766
        ot = dflag + OT_WORD;
5767
        modrm = ldub_code(s->pc++);
5768
        reg = ((modrm >> 3) & 7) | rex_r;
5769
        mod = (modrm >> 6) & 3;
5770
        rm = (modrm & 7) | REX_B(s);
5771
        gen_op_mov_TN_reg(OT_LONG, 1, reg);
5772
        if (mod != 3) {
5773
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5774
            /* specific case: we need to add a displacement */
5775
            gen_exts(ot, cpu_T[1]);
5776
            tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
5777
            tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
5778
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
5779
            gen_op_ld_T0_A0(ot + s->mem_index);
5780
        } else {
5781
            gen_op_mov_TN_reg(ot, 0, rm);
5782
        }
5783
    bt_op:
5784
        tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
5785
        switch(op) {
5786
        case 0:
5787
            tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
5788
            tcg_gen_movi_tl(cpu_cc_dst, 0);
5789
            break;
5790
        case 1:
5791
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5792
            tcg_gen_movi_tl(cpu_tmp0, 1);
5793
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5794
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5795
            break;
5796
        case 2:
5797
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5798
            tcg_gen_movi_tl(cpu_tmp0, 1);
5799
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5800
            tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
5801
            tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5802
            break;
5803
        default:
5804
        case 3:
5805
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5806
            tcg_gen_movi_tl(cpu_tmp0, 1);
5807
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5808
            tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5809
            break;
5810
        }
5811
        s->cc_op = CC_OP_SARB + ot;
5812
        if (op != 0) {
5813
            if (mod != 3)
5814
                gen_op_st_T0_A0(ot + s->mem_index);
5815
            else
5816
                gen_op_mov_reg_T0(ot, rm);
5817
            tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
5818
            tcg_gen_movi_tl(cpu_cc_dst, 0);
5819
        }
5820
        break;
5821
    case 0x1bc: /* bsf */
5822
    case 0x1bd: /* bsr */
5823
        {
5824
            int label1;
5825
            ot = dflag + OT_WORD;
5826
            modrm = ldub_code(s->pc++);
5827
            reg = ((modrm >> 3) & 7) | rex_r;
5828
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5829
            gen_extu(ot, cpu_T[0]);
5830
            label1 = gen_new_label();
5831
            tcg_gen_movi_tl(cpu_cc_dst, 0);
5832
            tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), label1);
5833
            if (b & 1) {
5834
                tcg_gen_helper_1_1(helper_bsr, cpu_T[0], cpu_T[0]);
5835
            } else {
5836
                tcg_gen_helper_1_1(helper_bsf, cpu_T[0], cpu_T[0]);
5837
            }
5838
            gen_op_mov_reg_T0(ot, reg);
5839
            tcg_gen_movi_tl(cpu_cc_dst, 1);
5840
            gen_set_label(label1);
5841
            tcg_gen_discard_tl(cpu_cc_src);
5842
            s->cc_op = CC_OP_LOGICB + ot;
5843
        }
5844
        break;
5845
        /************************/
5846
        /* bcd */
5847
    case 0x27: /* daa */
5848
        if (CODE64(s))
5849
            goto illegal_op;
5850
        if (s->cc_op != CC_OP_DYNAMIC)
5851
            gen_op_set_cc_op(s->cc_op);
5852
        gen_op_daa();
5853
        s->cc_op = CC_OP_EFLAGS;
5854
        break;
5855
    case 0x2f: /* das */
5856
        if (CODE64(s))
5857
            goto illegal_op;
5858
        if (s->cc_op != CC_OP_DYNAMIC)
5859
            gen_op_set_cc_op(s->cc_op);
5860
        gen_op_das();
5861
        s->cc_op = CC_OP_EFLAGS;
5862
        break;
5863
    case 0x37: /* aaa */
5864
        if (CODE64(s))
5865
            goto illegal_op;
5866
        if (s->cc_op != CC_OP_DYNAMIC)
5867
            gen_op_set_cc_op(s->cc_op);
5868
        gen_op_aaa();
5869
        s->cc_op = CC_OP_EFLAGS;
5870
        break;
5871
    case 0x3f: /* aas */
5872
        if (CODE64(s))
5873
            goto illegal_op;
5874
        if (s->cc_op != CC_OP_DYNAMIC)
5875
            gen_op_set_cc_op(s->cc_op);
5876
        gen_op_aas();
5877
        s->cc_op = CC_OP_EFLAGS;
5878
        break;
5879
    case 0xd4: /* aam */
5880
        if (CODE64(s))
5881
            goto illegal_op;
5882
        val = ldub_code(s->pc++);
5883
        if (val == 0) {
5884
            gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5885
        } else {
5886
            gen_op_aam(val);
5887
            s->cc_op = CC_OP_LOGICB;
5888
        }
5889
        break;
5890
    case 0xd5: /* aad */
5891
        if (CODE64(s))
5892
            goto illegal_op;
5893
        val = ldub_code(s->pc++);
5894
        gen_op_aad(val);
5895
        s->cc_op = CC_OP_LOGICB;
5896
        break;
5897
        /************************/
5898
        /* misc */
5899
    case 0x90: /* nop */
5900
        /* XXX: xchg + rex handling */
5901
        /* XXX: correct lock test for all insn */
5902
        if (prefixes & PREFIX_LOCK)
5903
            goto illegal_op;
5904
        if (prefixes & PREFIX_REPZ) {
5905
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
5906
        }
5907
        break;
5908
    case 0x9b: /* fwait */
5909
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5910
            (HF_MP_MASK | HF_TS_MASK)) {
5911
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5912
        } else {
5913
            if (s->cc_op != CC_OP_DYNAMIC)
5914
                gen_op_set_cc_op(s->cc_op);
5915
            gen_jmp_im(pc_start - s->cs_base);
5916
            tcg_gen_helper_0_0(helper_fwait);
5917
        }
5918
        break;
5919
    case 0xcc: /* int3 */
5920
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5921
            break;
5922
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5923
        break;
5924
    case 0xcd: /* int N */
5925
        val = ldub_code(s->pc++);
5926
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5927
            break;
5928
        if (s->vm86 && s->iopl != 3) {
5929
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5930
        } else {
5931
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5932
        }
5933
        break;
5934
    case 0xce: /* into */
5935
        if (CODE64(s))
5936
            goto illegal_op;
5937
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5938
            break;
5939
        if (s->cc_op != CC_OP_DYNAMIC)
5940
            gen_op_set_cc_op(s->cc_op);
5941
        gen_jmp_im(pc_start - s->cs_base);
5942
        gen_op_into(s->pc - pc_start);
5943
        break;
5944
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
5945
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
5946
            break;
5947
#if 1
5948
        gen_debug(s, pc_start - s->cs_base);
5949
#else
5950
        /* start debug */
5951
        tb_flush(cpu_single_env);
5952
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5953
#endif
5954
        break;
5955
    case 0xfa: /* cli */
5956
        if (!s->vm86) {
5957
            if (s->cpl <= s->iopl) {
5958
                tcg_gen_helper_0_0(helper_cli);
5959
            } else {
5960
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5961
            }
5962
        } else {
5963
            if (s->iopl == 3) {
5964
                tcg_gen_helper_0_0(helper_cli);
5965
            } else {
5966
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5967
            }
5968
        }
5969
        break;
5970
    case 0xfb: /* sti */
5971
        if (!s->vm86) {
5972
            if (s->cpl <= s->iopl) {
5973
            gen_sti:
5974
                tcg_gen_helper_0_0(helper_sti);
5975
                /* interruptions are enabled only the first insn after sti */
5976
                /* If several instructions disable interrupts, only the
5977
                   _first_ does it */
5978
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5979
                    tcg_gen_helper_0_0(helper_set_inhibit_irq);
5980
                /* give a chance to handle pending irqs */
5981
                gen_jmp_im(s->pc - s->cs_base);
5982
                gen_eob(s);
5983
            } else {
5984
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5985
            }
5986
        } else {
5987
            if (s->iopl == 3) {
5988
                goto gen_sti;
5989
            } else {
5990
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5991
            }
5992
        }
5993
        break;
5994
    case 0x62: /* bound */
5995
        if (CODE64(s))
5996
            goto illegal_op;
5997
        ot = dflag ? OT_LONG : OT_WORD;
5998
        modrm = ldub_code(s->pc++);
5999
        reg = (modrm >> 3) & 7;
6000
        mod = (modrm >> 6) & 3;
6001
        if (mod == 3)
6002
            goto illegal_op;
6003
        gen_op_mov_TN_reg(ot, 0, reg);
6004
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6005
        gen_jmp_im(pc_start - s->cs_base);
6006
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6007
        if (ot == OT_WORD)
6008
            tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
6009
        else
6010
            tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
6011
        break;
6012
    case 0x1c8 ... 0x1cf: /* bswap reg */
6013
        reg = (b & 7) | REX_B(s);
6014
#ifdef TARGET_X86_64
6015
        if (dflag == 2) {
6016
            gen_op_mov_TN_reg(OT_QUAD, 0, reg);
6017
            tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
6018
            gen_op_mov_reg_T0(OT_QUAD, reg);
6019
        } else
6020
        {
6021
            TCGv tmp0;
6022
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
6023
            
6024
            tmp0 = tcg_temp_new(TCG_TYPE_I32);
6025
            tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
6026
            tcg_gen_bswap_i32(tmp0, tmp0);
6027
            tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
6028
            gen_op_mov_reg_T0(OT_LONG, reg);
6029
        }
6030
#else
6031
        {
6032
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
6033
            tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
6034
            gen_op_mov_reg_T0(OT_LONG, reg);
6035
        }
6036
#endif
6037
        break;
6038
    case 0xd6: /* salc */
6039
        if (CODE64(s))
6040
            goto illegal_op;
6041
        if (s->cc_op != CC_OP_DYNAMIC)
6042
            gen_op_set_cc_op(s->cc_op);
6043
        gen_op_salc();
6044
        break;
6045
    case 0xe0: /* loopnz */
6046
    case 0xe1: /* loopz */
6047
        if (s->cc_op != CC_OP_DYNAMIC)
6048
            gen_op_set_cc_op(s->cc_op);
6049
        /* FALL THRU */
6050
    case 0xe2: /* loop */
6051
    case 0xe3: /* jecxz */
6052
        {
6053
            int l1, l2;
6054

    
6055
            tval = (int8_t)insn_get(s, OT_BYTE);
6056
            next_eip = s->pc - s->cs_base;
6057
            tval += next_eip;
6058
            if (s->dflag == 0)
6059
                tval &= 0xffff;
6060

    
6061
            l1 = gen_new_label();
6062
            l2 = gen_new_label();
6063
            b &= 3;
6064
            if (b == 3) {
6065
                gen_op_jz_ecx[s->aflag](l1);
6066
            } else {
6067
                gen_op_dec_ECX[s->aflag]();
6068
                if (b <= 1)
6069
                    gen_op_mov_T0_cc();
6070
                gen_op_loop[s->aflag][b](l1);
6071
            }
6072

    
6073
            gen_jmp_im(next_eip);
6074
            gen_op_jmp_label(l2);
6075
            gen_set_label(l1);
6076
            gen_jmp_im(tval);
6077
            gen_set_label(l2);
6078
            gen_eob(s);
6079
        }
6080
        break;
6081
    case 0x130: /* wrmsr */
6082
    case 0x132: /* rdmsr */
6083
        if (s->cpl != 0) {
6084
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6085
        } else {
6086
            int retval = 0;
6087
            if (b & 2) {
6088
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
6089
                tcg_gen_helper_0_0(helper_rdmsr);
6090
            } else {
6091
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
6092
                tcg_gen_helper_0_0(helper_wrmsr);
6093
            }
6094
            if(retval)
6095
                gen_eob(s);
6096
        }
6097
        break;
6098
    case 0x131: /* rdtsc */
6099
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
6100
            break;
6101
        gen_jmp_im(pc_start - s->cs_base);
6102
        tcg_gen_helper_0_0(helper_rdtsc);
6103
        break;
6104
    case 0x133: /* rdpmc */
6105
        gen_jmp_im(pc_start - s->cs_base);
6106
        tcg_gen_helper_0_0(helper_rdpmc);
6107
        break;
6108
    case 0x134: /* sysenter */
6109
        if (CODE64(s))
6110
            goto illegal_op;
6111
        if (!s->pe) {
6112
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6113
        } else {
6114
            if (s->cc_op != CC_OP_DYNAMIC) {
6115
                gen_op_set_cc_op(s->cc_op);
6116
                s->cc_op = CC_OP_DYNAMIC;
6117
            }
6118
            gen_jmp_im(pc_start - s->cs_base);
6119
            tcg_gen_helper_0_0(helper_sysenter);
6120
            gen_eob(s);
6121
        }
6122
        break;
6123
    case 0x135: /* sysexit */
6124
        if (CODE64(s))
6125
            goto illegal_op;
6126
        if (!s->pe) {
6127
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6128
        } else {
6129
            if (s->cc_op != CC_OP_DYNAMIC) {
6130
                gen_op_set_cc_op(s->cc_op);
6131
                s->cc_op = CC_OP_DYNAMIC;
6132
            }
6133
            gen_jmp_im(pc_start - s->cs_base);
6134
            tcg_gen_helper_0_0(helper_sysexit);
6135
            gen_eob(s);
6136
        }
6137
        break;
6138
#ifdef TARGET_X86_64
6139
    case 0x105: /* syscall */
6140
        /* XXX: is it usable in real mode ? */
6141
        if (s->cc_op != CC_OP_DYNAMIC) {
6142
            gen_op_set_cc_op(s->cc_op);
6143
            s->cc_op = CC_OP_DYNAMIC;
6144
        }
6145
        gen_jmp_im(pc_start - s->cs_base);
6146
        tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
6147
        gen_eob(s);
6148
        break;
6149
    case 0x107: /* sysret */
6150
        if (!s->pe) {
6151
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6152
        } else {
6153
            if (s->cc_op != CC_OP_DYNAMIC) {
6154
                gen_op_set_cc_op(s->cc_op);
6155
                s->cc_op = CC_OP_DYNAMIC;
6156
            }
6157
            gen_jmp_im(pc_start - s->cs_base);
6158
            tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
6159
            /* condition codes are modified only in long mode */
6160
            if (s->lma)
6161
                s->cc_op = CC_OP_EFLAGS;
6162
            gen_eob(s);
6163
        }
6164
        break;
6165
#endif
6166
    case 0x1a2: /* cpuid */
6167
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
6168
            break;
6169
        tcg_gen_helper_0_0(helper_cpuid);
6170
        break;
6171
    case 0xf4: /* hlt */
6172
        if (s->cpl != 0) {
6173
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6174
        } else {
6175
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
6176
                break;
6177
            if (s->cc_op != CC_OP_DYNAMIC)
6178
                gen_op_set_cc_op(s->cc_op);
6179
            gen_jmp_im(s->pc - s->cs_base);
6180
            tcg_gen_helper_0_0(helper_hlt);
6181
            s->is_jmp = 3;
6182
        }
6183
        break;
6184
    case 0x100:
6185
        modrm = ldub_code(s->pc++);
6186
        mod = (modrm >> 6) & 3;
6187
        op = (modrm >> 3) & 7;
6188
        switch(op) {
6189
        case 0: /* sldt */
6190
            if (!s->pe || s->vm86)
6191
                goto illegal_op;
6192
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
6193
                break;
6194
            gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
6195
            ot = OT_WORD;
6196
            if (mod == 3)
6197
                ot += s->dflag;
6198
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6199
            break;
6200
        case 2: /* lldt */
6201
            if (!s->pe || s->vm86)
6202
                goto illegal_op;
6203
            if (s->cpl != 0) {
6204
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6205
            } else {
6206
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
6207
                    break;
6208
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6209
                gen_jmp_im(pc_start - s->cs_base);
6210
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6211
                tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
6212
            }
6213
            break;
6214
        case 1: /* str */
6215
            if (!s->pe || s->vm86)
6216
                goto illegal_op;
6217
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
6218
                break;
6219
            gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
6220
            ot = OT_WORD;
6221
            if (mod == 3)
6222
                ot += s->dflag;
6223
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6224
            break;
6225
        case 3: /* ltr */
6226
            if (!s->pe || s->vm86)
6227
                goto illegal_op;
6228
            if (s->cpl != 0) {
6229
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6230
            } else {
6231
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
6232
                    break;
6233
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6234
                gen_jmp_im(pc_start - s->cs_base);
6235
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6236
                tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
6237
            }
6238
            break;
6239
        case 4: /* verr */
6240
        case 5: /* verw */
6241
            if (!s->pe || s->vm86)
6242
                goto illegal_op;
6243
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6244
            if (s->cc_op != CC_OP_DYNAMIC)
6245
                gen_op_set_cc_op(s->cc_op);
6246
            if (op == 4)
6247
                gen_op_verr();
6248
            else
6249
                gen_op_verw();
6250
            s->cc_op = CC_OP_EFLAGS;
6251
            break;
6252
        default:
6253
            goto illegal_op;
6254
        }
6255
        break;
6256
    case 0x101:
6257
        modrm = ldub_code(s->pc++);
6258
        mod = (modrm >> 6) & 3;
6259
        op = (modrm >> 3) & 7;
6260
        rm = modrm & 7;
6261
        switch(op) {
6262
        case 0: /* sgdt */
6263
            if (mod == 3)
6264
                goto illegal_op;
6265
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
6266
                break;
6267
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6268
            gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
6269
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
6270
            gen_add_A0_im(s, 2);
6271
            gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
6272
            if (!s->dflag)
6273
                gen_op_andl_T0_im(0xffffff);
6274
            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6275
            break;
6276
        case 1:
6277
            if (mod == 3) {
6278
                switch (rm) {
6279
                case 0: /* monitor */
6280
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6281
                        s->cpl != 0)
6282
                        goto illegal_op;
6283
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
6284
                        break;
6285
                    gen_jmp_im(pc_start - s->cs_base);
6286
#ifdef TARGET_X86_64
6287
                    if (s->aflag == 2) {
6288
                        gen_op_movq_A0_reg(R_EBX);
6289
                        gen_op_addq_A0_AL();
6290
                    } else
6291
#endif
6292
                    {
6293
                        gen_op_movl_A0_reg(R_EBX);
6294
                        gen_op_addl_A0_AL();
6295
                        if (s->aflag == 0)
6296
                            gen_op_andl_A0_ffff();
6297
                    }
6298
                    gen_add_A0_ds_seg(s);
6299
                    tcg_gen_helper_0_1(helper_monitor, cpu_A0);
6300
                    break;
6301
                case 1: /* mwait */
6302
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6303
                        s->cpl != 0)
6304
                        goto illegal_op;
6305
                    if (s->cc_op != CC_OP_DYNAMIC) {
6306
                        gen_op_set_cc_op(s->cc_op);
6307
                        s->cc_op = CC_OP_DYNAMIC;
6308
                    }
6309
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
6310
                        break;
6311
                    gen_jmp_im(s->pc - s->cs_base);
6312
                    tcg_gen_helper_0_0(helper_mwait);
6313
                    gen_eob(s);
6314
                    break;
6315
                default:
6316
                    goto illegal_op;
6317
                }
6318
            } else { /* sidt */
6319
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
6320
                    break;
6321
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6322
                gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
6323
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
6324
                gen_add_A0_im(s, 2);
6325
                gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
6326
                if (!s->dflag)
6327
                    gen_op_andl_T0_im(0xffffff);
6328
                gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6329
            }
6330
            break;
6331
        case 2: /* lgdt */
6332
        case 3: /* lidt */
6333
            if (mod == 3) {
6334
                switch(rm) {
6335
                case 0: /* VMRUN */
6336
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
6337
                        break;
6338
                    if (s->cc_op != CC_OP_DYNAMIC)
6339
                        gen_op_set_cc_op(s->cc_op);
6340
                    gen_jmp_im(s->pc - s->cs_base);
6341
                    tcg_gen_helper_0_0(helper_vmrun);
6342
                    s->cc_op = CC_OP_EFLAGS;
6343
                    gen_eob(s);
6344
                    break;
6345
                case 1: /* VMMCALL */
6346
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
6347
                         break;
6348
                    /* FIXME: cause #UD if hflags & SVM */
6349
                    tcg_gen_helper_0_0(helper_vmmcall);
6350
                    break;
6351
                case 2: /* VMLOAD */
6352
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
6353
                         break;
6354
                    tcg_gen_helper_0_0(helper_vmload);
6355
                    break;
6356
                case 3: /* VMSAVE */
6357
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
6358
                         break;
6359
                    tcg_gen_helper_0_0(helper_vmsave);
6360
                    break;
6361
                case 4: /* STGI */
6362
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
6363
                         break;
6364
                    tcg_gen_helper_0_0(helper_stgi);
6365
                    break;
6366
                case 5: /* CLGI */
6367
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
6368
                         break;
6369
                    tcg_gen_helper_0_0(helper_clgi);
6370
                    break;
6371
                case 6: /* SKINIT */
6372
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
6373
                         break;
6374
                    tcg_gen_helper_0_0(helper_skinit);
6375
                    break;
6376
                case 7: /* INVLPGA */
6377
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
6378
                         break;
6379
                    tcg_gen_helper_0_0(helper_invlpga);
6380
                    break;
6381
                default:
6382
                    goto illegal_op;
6383
                }
6384
            } else if (s->cpl != 0) {
6385
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6386
            } else {
6387
                if (gen_svm_check_intercept(s, pc_start,
6388
                                            op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
6389
                    break;
6390
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6391
                gen_op_ld_T1_A0(OT_WORD + s->mem_index);
6392
                gen_add_A0_im(s, 2);
6393
                gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6394
                if (!s->dflag)
6395
                    gen_op_andl_T0_im(0xffffff);
6396
                if (op == 2) {
6397
                    gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
6398
                    gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
6399
                } else {
6400
                    gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
6401
                    gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
6402
                }
6403
            }
6404
            break;
6405
        case 4: /* smsw */
6406
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
6407
                break;
6408
            gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
6409
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6410
            break;
6411
        case 6: /* lmsw */
6412
            if (s->cpl != 0) {
6413
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6414
            } else {
6415
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
6416
                    break;
6417
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6418
                tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
6419
                gen_jmp_im(s->pc - s->cs_base);
6420
                gen_eob(s);
6421
            }
6422
            break;
6423
        case 7: /* invlpg */
6424
            if (s->cpl != 0) {
6425
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6426
            } else {
6427
                if (mod == 3) {
6428
#ifdef TARGET_X86_64
6429
                    if (CODE64(s) && rm == 0) {
6430
                        /* swapgs */
6431
                        gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
6432
                        gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
6433
                        gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
6434
                        gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
6435
                    } else
6436
#endif
6437
                    {
6438
                        goto illegal_op;
6439
                    }
6440
                } else {
6441
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
6442
                        break;
6443
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6444
                    tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
6445
                    gen_jmp_im(s->pc - s->cs_base);
6446
                    gen_eob(s);
6447
                }
6448
            }
6449
            break;
6450
        default:
6451
            goto illegal_op;
6452
        }
6453
        break;
6454
    case 0x108: /* invd */
6455
    case 0x109: /* wbinvd */
6456
        if (s->cpl != 0) {
6457
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6458
        } else {
6459
            if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6460
                break;
6461
            /* nothing to do */
6462
        }
6463
        break;
6464
    case 0x63: /* arpl or movslS (x86_64) */
6465
#ifdef TARGET_X86_64
6466
        if (CODE64(s)) {
6467
            int d_ot;
6468
            /* d_ot is the size of destination */
6469
            d_ot = dflag + OT_WORD;
6470

    
6471
            modrm = ldub_code(s->pc++);
6472
            reg = ((modrm >> 3) & 7) | rex_r;
6473
            mod = (modrm >> 6) & 3;
6474
            rm = (modrm & 7) | REX_B(s);
6475

    
6476
            if (mod == 3) {
6477
                gen_op_mov_TN_reg(OT_LONG, 0, rm);
6478
                /* sign extend */
6479
                if (d_ot == OT_QUAD)
6480
                    gen_op_movslq_T0_T0();
6481
                gen_op_mov_reg_T0(d_ot, reg);
6482
            } else {
6483
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6484
                if (d_ot == OT_QUAD) {
6485
                    gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6486
                } else {
6487
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6488
                }
6489
                gen_op_mov_reg_T0(d_ot, reg);
6490
            }
6491
        } else
6492
#endif
6493
        {
6494
            if (!s->pe || s->vm86)
6495
                goto illegal_op;
6496
            ot = dflag ? OT_LONG : OT_WORD;
6497
            modrm = ldub_code(s->pc++);
6498
            reg = (modrm >> 3) & 7;
6499
            mod = (modrm >> 6) & 3;
6500
            rm = modrm & 7;
6501
            if (mod != 3) {
6502
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6503
                gen_op_ld_T0_A0(ot + s->mem_index);
6504
            } else {
6505
                gen_op_mov_TN_reg(ot, 0, rm);
6506
            }
6507
            gen_op_mov_TN_reg(ot, 1, reg);
6508
            if (s->cc_op != CC_OP_DYNAMIC)
6509
                gen_op_set_cc_op(s->cc_op);
6510
            gen_op_arpl();
6511
            s->cc_op = CC_OP_EFLAGS;
6512
            if (mod != 3) {
6513
                gen_op_st_T0_A0(ot + s->mem_index);
6514
            } else {
6515
                gen_op_mov_reg_T0(ot, rm);
6516
            }
6517
            gen_op_arpl_update();
6518
        }
6519
        break;
6520
    case 0x102: /* lar */
6521
    case 0x103: /* lsl */
6522
        if (!s->pe || s->vm86)
6523
            goto illegal_op;
6524
        ot = dflag ? OT_LONG : OT_WORD;
6525
        modrm = ldub_code(s->pc++);
6526
        reg = ((modrm >> 3) & 7) | rex_r;
6527
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6528
        gen_op_mov_TN_reg(ot, 1, reg);
6529
        if (s->cc_op != CC_OP_DYNAMIC)
6530
            gen_op_set_cc_op(s->cc_op);
6531
        if (b == 0x102)
6532
            gen_op_lar();
6533
        else
6534
            gen_op_lsl();
6535
        s->cc_op = CC_OP_EFLAGS;
6536
        gen_op_mov_reg_T1(ot, reg);
6537
        break;
6538
    case 0x118:
6539
        modrm = ldub_code(s->pc++);
6540
        mod = (modrm >> 6) & 3;
6541
        op = (modrm >> 3) & 7;
6542
        switch(op) {
6543
        case 0: /* prefetchnta */
6544
        case 1: /* prefetchnt0 */
6545
        case 2: /* prefetchnt0 */
6546
        case 3: /* prefetchnt0 */
6547
            if (mod == 3)
6548
                goto illegal_op;
6549
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6550
            /* nothing more to do */
6551
            break;
6552
        default: /* nop (multi byte) */
6553
            gen_nop_modrm(s, modrm);
6554
            break;
6555
        }
6556
        break;
6557
    case 0x119 ... 0x11f: /* nop (multi byte) */
6558
        modrm = ldub_code(s->pc++);
6559
        gen_nop_modrm(s, modrm);
6560
        break;
6561
    case 0x120: /* mov reg, crN */
6562
    case 0x122: /* mov crN, reg */
6563
        if (s->cpl != 0) {
6564
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6565
        } else {
6566
            modrm = ldub_code(s->pc++);
6567
            if ((modrm & 0xc0) != 0xc0)
6568
                goto illegal_op;
6569
            rm = (modrm & 7) | REX_B(s);
6570
            reg = ((modrm >> 3) & 7) | rex_r;
6571
            if (CODE64(s))
6572
                ot = OT_QUAD;
6573
            else
6574
                ot = OT_LONG;
6575
            switch(reg) {
6576
            case 0:
6577
            case 2:
6578
            case 3:
6579
            case 4:
6580
            case 8:
6581
                if (b & 2) {
6582
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6583
                    gen_op_mov_TN_reg(ot, 0, rm);
6584
                    tcg_gen_helper_0_2(helper_movl_crN_T0, 
6585
                                       tcg_const_i32(reg), cpu_T[0]);
6586
                    gen_jmp_im(s->pc - s->cs_base);
6587
                    gen_eob(s);
6588
                } else {
6589
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6590
#if !defined(CONFIG_USER_ONLY)
6591
                    if (reg == 8)
6592
                        tcg_gen_helper_1_0(helper_movtl_T0_cr8, cpu_T[0]);
6593
                    else
6594
#endif
6595
                        gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6596
                    gen_op_mov_reg_T0(ot, rm);
6597
                }
6598
                break;
6599
            default:
6600
                goto illegal_op;
6601
            }
6602
        }
6603
        break;
6604
    case 0x121: /* mov reg, drN */
6605
    case 0x123: /* mov drN, reg */
6606
        if (s->cpl != 0) {
6607
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6608
        } else {
6609
            modrm = ldub_code(s->pc++);
6610
            if ((modrm & 0xc0) != 0xc0)
6611
                goto illegal_op;
6612
            rm = (modrm & 7) | REX_B(s);
6613
            reg = ((modrm >> 3) & 7) | rex_r;
6614
            if (CODE64(s))
6615
                ot = OT_QUAD;
6616
            else
6617
                ot = OT_LONG;
6618
            /* XXX: do it dynamically with CR4.DE bit */
6619
            if (reg == 4 || reg == 5 || reg >= 8)
6620
                goto illegal_op;
6621
            if (b & 2) {
6622
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6623
                gen_op_mov_TN_reg(ot, 0, rm);
6624
                tcg_gen_helper_0_2(helper_movl_drN_T0,
6625
                                   tcg_const_i32(reg), cpu_T[0]);
6626
                gen_jmp_im(s->pc - s->cs_base);
6627
                gen_eob(s);
6628
            } else {
6629
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6630
                gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6631
                gen_op_mov_reg_T0(ot, rm);
6632
            }
6633
        }
6634
        break;
6635
    case 0x106: /* clts */
6636
        if (s->cpl != 0) {
6637
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6638
        } else {
6639
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6640
            tcg_gen_helper_0_0(helper_clts);
6641
            /* abort block because static cpu state changed */
6642
            gen_jmp_im(s->pc - s->cs_base);
6643
            gen_eob(s);
6644
        }
6645
        break;
6646
    /* MMX/3DNow!/SSE/SSE2/SSE3 support */
6647
    case 0x1c3: /* MOVNTI reg, mem */
6648
        if (!(s->cpuid_features & CPUID_SSE2))
6649
            goto illegal_op;
6650
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6651
        modrm = ldub_code(s->pc++);
6652
        mod = (modrm >> 6) & 3;
6653
        if (mod == 3)
6654
            goto illegal_op;
6655
        reg = ((modrm >> 3) & 7) | rex_r;
6656
        /* generate a generic store */
6657
        gen_ldst_modrm(s, modrm, ot, reg, 1);
6658
        break;
6659
    case 0x1ae:
6660
        modrm = ldub_code(s->pc++);
6661
        mod = (modrm >> 6) & 3;
6662
        op = (modrm >> 3) & 7;
6663
        switch(op) {
6664
        case 0: /* fxsave */
6665
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6666
                (s->flags & HF_EM_MASK))
6667
                goto illegal_op;
6668
            if (s->flags & HF_TS_MASK) {
6669
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6670
                break;
6671
            }
6672
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6673
            if (s->cc_op != CC_OP_DYNAMIC)
6674
                gen_op_set_cc_op(s->cc_op);
6675
            gen_jmp_im(pc_start - s->cs_base);
6676
            tcg_gen_helper_0_2(helper_fxsave, 
6677
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
6678
            break;
6679
        case 1: /* fxrstor */
6680
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6681
                (s->flags & HF_EM_MASK))
6682
                goto illegal_op;
6683
            if (s->flags & HF_TS_MASK) {
6684
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6685
                break;
6686
            }
6687
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6688
            if (s->cc_op != CC_OP_DYNAMIC)
6689
                gen_op_set_cc_op(s->cc_op);
6690
            gen_jmp_im(pc_start - s->cs_base);
6691
            tcg_gen_helper_0_2(helper_fxrstor,
6692
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
6693
            break;
6694
        case 2: /* ldmxcsr */
6695
        case 3: /* stmxcsr */
6696
            if (s->flags & HF_TS_MASK) {
6697
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6698
                break;
6699
            }
6700
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6701
                mod == 3)
6702
                goto illegal_op;
6703
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6704
            if (op == 2) {
6705
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6706
                gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6707
            } else {
6708
                gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6709
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
6710
            }
6711
            break;
6712
        case 5: /* lfence */
6713
        case 6: /* mfence */
6714
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6715
                goto illegal_op;
6716
            break;
6717
        case 7: /* sfence / clflush */
6718
            if ((modrm & 0xc7) == 0xc0) {
6719
                /* sfence */
6720
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
6721
                if (!(s->cpuid_features & CPUID_SSE))
6722
                    goto illegal_op;
6723
            } else {
6724
                /* clflush */
6725
                if (!(s->cpuid_features & CPUID_CLFLUSH))
6726
                    goto illegal_op;
6727
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6728
            }
6729
            break;
6730
        default:
6731
            goto illegal_op;
6732
        }
6733
        break;
6734
    case 0x10d: /* 3DNow! prefetch(w) */
6735
        modrm = ldub_code(s->pc++);
6736
        mod = (modrm >> 6) & 3;
6737
        if (mod == 3)
6738
            goto illegal_op;
6739
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6740
        /* ignore for now */
6741
        break;
6742
    case 0x1aa: /* rsm */
6743
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
6744
            break;
6745
        if (!(s->flags & HF_SMM_MASK))
6746
            goto illegal_op;
6747
        if (s->cc_op != CC_OP_DYNAMIC) {
6748
            gen_op_set_cc_op(s->cc_op);
6749
            s->cc_op = CC_OP_DYNAMIC;
6750
        }
6751
        gen_jmp_im(s->pc - s->cs_base);
6752
        tcg_gen_helper_0_0(helper_rsm);
6753
        gen_eob(s);
6754
        break;
6755
    case 0x10e ... 0x10f:
6756
        /* 3DNow! instructions, ignore prefixes */
6757
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
6758
    case 0x110 ... 0x117:
6759
    case 0x128 ... 0x12f:
6760
    case 0x150 ... 0x177:
6761
    case 0x17c ... 0x17f:
6762
    case 0x1c2:
6763
    case 0x1c4 ... 0x1c6:
6764
    case 0x1d0 ... 0x1fe:
6765
        gen_sse(s, b, pc_start, rex_r);
6766
        break;
6767
    default:
6768
        goto illegal_op;
6769
    }
6770
    /* lock generation */
6771
    if (s->prefix & PREFIX_LOCK)
6772
        tcg_gen_helper_0_0(helper_unlock);
6773
    return s->pc;
6774
 illegal_op:
6775
    if (s->prefix & PREFIX_LOCK)
6776
        tcg_gen_helper_0_0(helper_unlock);
6777
    /* XXX: ensure that no lock was generated */
6778
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6779
    return s->pc;
6780
}
/* Expansion hook for TCG macro opcodes.  Only the self-test macro is
   defined at the moment, and it is compiled in only when MACRO_TEST
   is enabled; otherwise this callback is a no-op. */
static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
{
#ifdef MACRO_TEST
    if (macro_id == MACRO_TEST) {
        tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
    }
#endif
}
void optimize_flags_init(void)
6794
{
6795
#if TCG_TARGET_REG_BITS == 32
6796
    assert(sizeof(CCTable) == (1 << 3));
6797
#else
6798
    assert(sizeof(CCTable) == (1 << 4));
6799
#endif
6800
    tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
6801

    
6802
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
6803
#if TARGET_LONG_BITS > HOST_LONG_BITS
6804
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL, 
6805
                                  TCG_AREG0, offsetof(CPUState, t0), "T0");
6806
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
6807
                                  TCG_AREG0, offsetof(CPUState, t1), "T1");
6808
    cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
6809
                                TCG_AREG0, offsetof(CPUState, t2), "A0");
6810
#else
6811
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
6812
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
6813
    cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
6814
#endif
6815
    cpu_T3 = tcg_global_mem_new(TCG_TYPE_TL,
6816
                                TCG_AREG0, offsetof(CPUState, t3), "T3");
6817
#if defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS)
6818
    /* XXX: must be suppressed once there are less fixed registers */
6819
    cpu_tmp1_i64 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");
6820
#endif
6821
    cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
6822
                                   TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
6823
    cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
6824
                                    TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
6825
    cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
6826
                                    TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
6827
}

/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
static inline int gen_intermediate_code_internal(CPUState *env,
6833
                                                 TranslationBlock *tb,
6834
                                                 int search_pc)
6835
{
6836
    DisasContext dc1, *dc = &dc1;
6837
    target_ulong pc_ptr;
6838
    uint16_t *gen_opc_end;
6839
    int j, lj, cflags;
6840
    uint64_t flags;
6841
    target_ulong pc_start;
6842
    target_ulong cs_base;
6843

    
6844
    /* generate intermediate code */
6845
    pc_start = tb->pc;
6846
    cs_base = tb->cs_base;
6847
    flags = tb->flags;
6848
    cflags = tb->cflags;
6849

    
6850
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
6851
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
6852
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
6853
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
6854
    dc->f_st = 0;
6855
    dc->vm86 = (flags >> VM_SHIFT) & 1;
6856
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
6857
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
6858
    dc->tf = (flags >> TF_SHIFT) & 1;
6859
    dc->singlestep_enabled = env->singlestep_enabled;
6860
    dc->cc_op = CC_OP_DYNAMIC;
6861
    dc->cs_base = cs_base;
6862
    dc->tb = tb;
6863
    dc->popl_esp_hack = 0;
6864
    /* select memory access functions */
6865
    dc->mem_index = 0;
6866
    if (flags & HF_SOFTMMU_MASK) {
6867
        if (dc->cpl == 3)
6868
            dc->mem_index = 2 * 4;
6869
        else
6870
            dc->mem_index = 1 * 4;
6871
    }
6872
    dc->cpuid_features = env->cpuid_features;
6873
    dc->cpuid_ext_features = env->cpuid_ext_features;
6874
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
6875
#ifdef TARGET_X86_64
6876
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
6877
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
6878
#endif
6879
    dc->flags = flags;
6880
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
6881
                    (flags & HF_INHIBIT_IRQ_MASK)
6882
#ifndef CONFIG_SOFTMMU
6883
                    || (flags & HF_SOFTMMU_MASK)
6884
#endif
6885
                    );
6886
#if 0
6887
    /* check addseg logic */
6888
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
6889
        printf("ERROR addseg\n");
6890
#endif
6891

    
6892
    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
6893
#if !(defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS))
6894
    cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
6895
#endif
6896
    cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
6897
    cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
6898
    cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
6899
    cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
6900
    cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
6901
    cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
6902
    cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
6903

    
6904
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
6905

    
6906
    dc->is_jmp = DISAS_NEXT;
6907
    pc_ptr = pc_start;
6908
    lj = -1;
6909

    
6910
    for(;;) {
6911
        if (env->nb_breakpoints > 0) {
6912
            for(j = 0; j < env->nb_breakpoints; j++) {
6913
                if (env->breakpoints[j] == pc_ptr) {
6914
                    gen_debug(dc, pc_ptr - dc->cs_base);
6915
                    break;
6916
                }
6917
            }
6918
        }
6919
        if (search_pc) {
6920
            j = gen_opc_ptr - gen_opc_buf;
6921
            if (lj < j) {
6922
                lj++;
6923
                while (lj < j)
6924
                    gen_opc_instr_start[lj++] = 0;
6925
            }
6926
            gen_opc_pc[lj] = pc_ptr;
6927
            gen_opc_cc_op[lj] = dc->cc_op;
6928
            gen_opc_instr_start[lj] = 1;
6929
        }
6930
        pc_ptr = disas_insn(dc, pc_ptr);
6931
        /* stop translation if indicated */
6932
        if (dc->is_jmp)
6933
            break;
6934
        /* if single step mode, we generate only one instruction and
6935
           generate an exception */
6936
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
6937
           the flag and abort the translation to give the irqs a
6938
           change to be happen */
6939
        if (dc->tf || dc->singlestep_enabled ||
6940
            (flags & HF_INHIBIT_IRQ_MASK) ||
6941
            (cflags & CF_SINGLE_INSN)) {
6942
            gen_jmp_im(pc_ptr - dc->cs_base);
6943
            gen_eob(dc);
6944
            break;
6945
        }
6946
        /* if too long translation, stop generation too */
6947
        if (gen_opc_ptr >= gen_opc_end ||
6948
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
6949
            gen_jmp_im(pc_ptr - dc->cs_base);
6950
            gen_eob(dc);
6951
            break;
6952
        }
6953
    }
6954
    *gen_opc_ptr = INDEX_op_end;
6955
    /* we don't forget to fill the last values */
6956
    if (search_pc) {
6957
        j = gen_opc_ptr - gen_opc_buf;
6958
        lj++;
6959
        while (lj <= j)
6960
            gen_opc_instr_start[lj++] = 0;
6961
    }
6962

    
6963
#ifdef DEBUG_DISAS
6964
    if (loglevel & CPU_LOG_TB_CPU) {
6965
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
6966
    }
6967
    if (loglevel & CPU_LOG_TB_IN_ASM) {
6968
        int disas_flags;
6969
        fprintf(logfile, "----------------\n");
6970
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
6971
#ifdef TARGET_X86_64
6972
        if (dc->code64)
6973
            disas_flags = 2;
6974
        else
6975
#endif
6976
            disas_flags = !dc->code32;
6977
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
6978
        fprintf(logfile, "\n");
6979
        if (loglevel & CPU_LOG_TB_OP_OPT) {
6980
            fprintf(logfile, "OP before opt:\n");
6981
            tcg_dump_ops(&tcg_ctx, logfile);
6982
            fprintf(logfile, "\n");
6983
        }
6984
    }
6985
#endif
6986

    
6987
    if (!search_pc)
6988
        tb->size = pc_ptr - pc_start;
6989
    return 0;
6990
}
6991

    
6992
/* Generate TCG intermediate code for translation block 'tb' in normal
   mode: no per-instruction search-PC metadata is recorded.
   Returns 0 (the value of gen_intermediate_code_internal). */
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 0);
}
}
6996

    
6997
/* Generate TCG intermediate code for 'tb' in search-PC mode: the
   per-instruction arrays (gen_opc_pc[], gen_opc_cc_op[],
   gen_opc_instr_start[]) are filled so gen_pc_load() can later map a
   host PC back to guest CPU state.  Returns 0. */
int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 1);
}
}
7001

    
7002
/* Restore guest CPU state after a fault inside generated code.
   'pc_pos' indexes the per-instruction metadata arrays that were filled
   by a search-PC retranslation of 'tb' (gen_intermediate_code_pc):
   env->eip is rebuilt from gen_opc_pc[] (which stores cs_base-relative
   values plus cs_base, hence the subtraction), and the lazy condition
   code op is restored from gen_opc_cc_op[].
   'searched_pc' and 'puc' are only used for debug logging / unused here. */
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    int cc_op;
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP) {
        int i;
        fprintf(logfile, "RESTORE:\n");
        for(i = 0;i <= pc_pos; i++) {
            if (gen_opc_instr_start[i]) {
                fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
            }
        }
        fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                (uint32_t)tb->cs_base);
    }
#endif
    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
    /* CC_OP_DYNAMIC means the cc_op already in env is the live value,
       so only overwrite it when a static op was recorded. */
    cc_op = gen_opc_cc_op[pc_pos];
    if (cc_op != CC_OP_DYNAMIC)
        env->cc_op = cc_op;
}