Statistics
| Branch: | Revision:

root / target-i386 / translate.c @ b8b6a50b

History | View | Annotate | Download (222.7 kB)

1
/*
2
 *  i386 translation
3
 *
4
 *  Copyright (c) 2003 Fabrice Bellard
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, write to the Free Software
18
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
 */
20
#include <stdarg.h>
21
#include <stdlib.h>
22
#include <stdio.h>
23
#include <string.h>
24
#include <inttypes.h>
25
#include <signal.h>
26
#include <assert.h>
27

    
28
#include "cpu.h"
29
#include "exec-all.h"
30
#include "disas.h"
31
#include "helper.h"
32
#include "tcg-op.h"
33

    
34
/* Instruction prefix bits accumulated while decoding (DisasContext.prefix). */
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08   /* 0x66 operand-size override */
#define PREFIX_ADR    0x10   /* 0x67 address-size override */
39

    
40
/* Helpers that compile away on 32-bit-only targets: X86_64_ONLY(x)
   yields x (or NULL), X86_64_DEF(x...) expands its arguments (or
   nothing), and the CODE64/REX_* accessors read the DisasContext
   fields that only exist when TARGET_X86_64 is defined. */
#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif
57

    
58
//#define MACRO_TEST   1
59

    
60
/* global register indexes */
static TCGv cpu_env, cpu_T[2], cpu_A0;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1, cpu_tmp2, cpu_tmp3, cpu_ptr0, cpu_ptr1;

#ifdef TARGET_X86_64
/* non-zero when REX prefixes are active, so byte registers 4-7 mean
   SPL/BPL/SIL/DIL rather than AH/CH/DH/BH -- see gen_op_mov_reg_TN */
static int x86_64_hregs;
#endif
68

    
69
/* Per-instruction / per-translation-block state of the i386 decoder. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;   /* PREFIX_* bits seen on the current insn */
    int aflag, dflag; /* effective address / operand size */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* REX.X / REX.B index and base extensions */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;    /* current privilege level */
    int iopl;   /* I/O privilege level */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;       /* CPUID feature bits of the emulated CPU */
    int cpuid_ext_features;
    int cpuid_ext2_features;
} DisasContext;
105

    
106
/* forward declarations */
static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
109

    
110
/* i386 arith/logic operations -- ordering matches the /r encoding of
   the group-1 ALU opcodes */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};
121

    
122
/* i386 shift ops -- ordering matches the /r encoding of the
   group-2 shift opcodes */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};
133

    
134
/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};
141

    
142
enum {
    /* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
157

    
158
/* T0 <- 0 */
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}
162

    
163
/* T0 <- sign-typed 32-bit immediate */
static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}
167

    
168
/* T0 <- unsigned 32-bit immediate */
static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}
172

    
173
/* T1 <- sign-typed 32-bit immediate */
static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
177

    
178
/* T1 <- unsigned 32-bit immediate */
static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
182

    
183
/* A0 <- 32-bit immediate */
static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
187

    
188
#ifdef TARGET_X86_64
189
static inline void gen_op_movq_A0_im(int64_t val)
190
{
191
    tcg_gen_movi_tl(cpu_A0, val);
192
}
193
#endif
194

    
195
/* T0 <- target_ulong immediate (full target width) */
static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}
199

    
200
/* T1 <- target_ulong immediate (full target width) */
static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
204

    
205
/* T0 &= 0xffff (truncate to 16 bits) */
static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}
209

    
210
/* T0 &= val */
static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}
214

    
215
/* T0 <- T1 */
static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}
219

    
220
/* A0 &= 0xffff (truncate address to 16 bits) */
static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
224

    
225
/* NB_OP_SIZES is the number of operand sizes handled by the op tables
   (word/long[/quad] plus byte where applicable); DEF_REGS expands one
   table entry per architectural integer register (8 or 16). */
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,\
  prefix ## R8 ## suffix,\
  prefix ## R9 ## suffix,\
  prefix ## R10 ## suffix,\
  prefix ## R11 ## suffix,\
  prefix ## R12 ## suffix,\
  prefix ## R13 ## suffix,\
  prefix ## R14 ## suffix,\
  prefix ## R15 ## suffix,

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,

#endif /* !TARGET_X86_64 */
262

    
263
/* Byte offsets of the 8/16/32-bit sub-registers inside a target_ulong
   CPU register slot, for each host endianness (B = low byte, H = high
   byte AH..BH, W = 16-bit word, L = low 32 bits, LH = high 32 bits). */
#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
276

    
277
/* Store cpu_T[t_index] into guest register 'reg' with operand size
   'ot'.  For byte stores, regs 4-7 address the high byte (AH..BH)
   unless REX prefixes are active (x86_64_hregs) or reg >= 8.  In
   64-bit mode a 32-bit store also clears the high half of the
   register, as the architecture requires. */
static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            /* AH, CH, DH, BH: high byte of regs 0-3 */
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
309

    
310
/* guest reg <- T0, with operand size ot */
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 0, reg);
}
314

    
315
/* guest reg <- T1, with operand size ot */
static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 1, reg);
}
319

    
320
/* Store cpu_A0 into guest register 'reg'.  size: 0 = 16 bit,
   1 = 32 bit (clears the high half in 64-bit mode), 2 = 64 bit. */
static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
345

    
346
/* Load guest register 'reg' into cpu_T[t_index].  Only the byte case
   for AH..BH needs special handling; every other size loads the full
   register word (callers truncate as needed). */
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            /* AH, CH, DH, BH: high byte of regs 0-3 */
            tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}
362

    
363
/* A0 <- low 32 bits of guest register, zero-extended */
static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}
367

    
368
/* A0 += val, keeping A0 a 32-bit address (truncated on 64-bit hosts) */
static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
375

    
376
#ifdef TARGET_X86_64
377
static inline void gen_op_addq_A0_im(int64_t val)
378
{
379
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
380
}
381
#endif
382
    
383
/* A0 += val, choosing 64-bit or 32-bit address arithmetic according
   to the current code segment */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
392

    
393
/* T0 += T1 */
static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}
397

    
398
/* EIP <- T0 */
static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}
402

    
403
/* SP += val, updating only the low 16 bits of ESP */
static inline void gen_op_addw_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]) + REG_W_OFFSET);
}
409

    
410
/* ESP += val as a 32-bit quantity (truncated on 64-bit targets) */
static inline void gen_op_addl_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}
419

    
420
#ifdef TARGET_X86_64
421
static inline void gen_op_addq_ESP_im(int32_t val)
422
{
423
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
424
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
425
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
426
}
427
#endif
428

    
429
/* env->cc_op <- val (materialize the lazily tracked CC operation) */
static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_tl(cpu_tmp0, val);
    tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, cc_op));
}
434

    
435
/* A0 += reg << shift, result truncated to 32 bits on 64-bit targets
   (SIB-style scaled index addition) */
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
445

    
446
/* A0 <- low 32 bits of segment base */
static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}
450

    
451
/* A0 += segment base, truncated to 32 bits on 64-bit targets */
static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
459

    
460
#ifdef TARGET_X86_64
461
static inline void gen_op_movq_A0_seg(int reg)
462
{
463
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
464
}
465

    
466
static inline void gen_op_addq_A0_seg(int reg)
467
{
468
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
469
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
470
}
471

    
472
static inline void gen_op_movq_A0_reg(int reg)
473
{
474
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
475
}
476

    
477
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
478
{
479
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
480
    if (shift != 0) 
481
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
482
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
483
}
484
#endif
485

    
486
/* CMOV micro-op table, indexed by [operand size - 1][destination reg]
   (no byte size, hence NB_OP_SIZES - 1 rows) */
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_cmovw_, _T1_T0)
    },
    [1] = {
        DEF_REGS(gen_op_cmovl_, _T1_T0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_cmovq_, _T1_T0)
    },
#endif
};
499

    
500
/* ADC/SBB micro-op tables: rows are operand sizes (b/w/l/q), columns
   are {adc, sbb}.  The _mem variant has one group of 4 rows per
   memory-access suffix (_raw[/_kernel/_user]). */
#define DEF_ARITHC(SUFFIX)\
    {\
        gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
    },

static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
    DEF_ARITHC( )
};

static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
    DEF_ARITHC(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_ARITHC(_kernel)
    DEF_ARITHC(_user)
#endif
};
529

    
530
/* CC_OP value resulting from each group-1 ALU op (byte size;
   indexed by OP_ADDL..OP_CMPL) */
static const int cc_op_arithb[8] = {
    CC_OP_ADDB,
    CC_OP_LOGICB,
    CC_OP_ADDB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
};
540

    
541
/* CMPXCHG micro-op tables, indexed by operand size (b/w/l/q) */
#define DEF_CMPXCHG(SUFFIX)\
    gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
    X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),

static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
    DEF_CMPXCHG( )
};

static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
    DEF_CMPXCHG(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_CMPXCHG(_kernel)
    DEF_CMPXCHG(_user)
#endif
};
558

    
559
/* Shift/rotate micro-op tables: rows are operand sizes (b/w/l/q),
   columns follow the group-2 /r encoding (OP_ROL..OP_SAR).  Entry 6
   repeats shl on purpose: the undocumented /6 encoding (OP_SHL1)
   behaves like SHL. */
#define DEF_SHIFT(SUFFIX)\
    {\
        gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_roll ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
    },

static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
    DEF_SHIFT( )
};

static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
    DEF_SHIFT(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFT(_kernel)
    DEF_SHIFT(_user)
#endif
};
612

    
613
/* SHLD/SHRD (double-precision shift) micro-op tables: rows are
   operand sizes (byte row is NULL -- no byte form exists), columns
   are {shld, shrd}; 'op' selects the count source (im or ECX). */
#define DEF_SHIFTD(SUFFIX, op)\
    {\
        NULL,\
        NULL,\
    },\
    {\
        gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
        gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
     },\
    {\
        gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
        gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    },\
    {\
X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
           gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
    },

static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
    DEF_SHIFTD(, im)
};

static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
    DEF_SHIFTD(, ECX)
};

static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, im)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, im)
    DEF_SHIFTD(_user, im)
#endif
};

static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, ECX)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, ECX)
    DEF_SHIFTD(_user, ECX)
#endif
};
654

    
655
/* Bit-test micro-op table, indexed by [operand size - 1] (w/l/q)
   and operation {bt, bts, btr, btc} */
static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
    [0] = {
        gen_op_btw_T0_T1_cc,
        gen_op_btsw_T0_T1_cc,
        gen_op_btrw_T0_T1_cc,
        gen_op_btcw_T0_T1_cc,
    },
    [1] = {
        gen_op_btl_T0_T1_cc,
        gen_op_btsl_T0_T1_cc,
        gen_op_btrl_T0_T1_cc,
        gen_op_btcl_T0_T1_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_btq_T0_T1_cc,
        gen_op_btsq_T0_T1_cc,
        gen_op_btrq_T0_T1_cc,
        gen_op_btcq_T0_T1_cc,
    },
#endif
};
677

    
678
/* Add the byte offset of bit index T1 to A0 (memory-operand bit
   tests), indexed by operand size - 1 (w/l/q) */
static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
    gen_op_add_bitw_A0_T1,
    gen_op_add_bitl_A0_T1,
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
};
683

    
684
/* BSF/BSR micro-op table, indexed by [operand size - 1][0 = bsf,
   1 = bsr] */
static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
    [0] = {
        gen_op_bsfw_T0_cc,
        gen_op_bsrw_T0_cc,
    },
    [1] = {
        gen_op_bsfl_T0_cc,
        gen_op_bsrl_T0_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_bsfq_T0_cc,
        gen_op_bsrq_T0_cc,
    },
#endif
};
700

    
701
static inline void gen_op_lds_T0_A0(int idx)
702
{
703
    int mem_index = (idx >> 2) - 1;
704
    switch(idx & 3) {
705
    case 0:
706
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
707
        break;
708
    case 1:
709
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
710
        break;
711
    default:
712
    case 2:
713
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
714
        break;
715
    }
716
}
717

    
718
/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
719
static inline void gen_op_ld_T0_A0(int idx)
720
{
721
    int mem_index = (idx >> 2) - 1;
722
    switch(idx & 3) {
723
    case 0:
724
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
725
        break;
726
    case 1:
727
        tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
728
        break;
729
    case 2:
730
        tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
731
        break;
732
    default:
733
    case 3:
734
        tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
735
        break;
736
    }
737
}
738

    
739
/* unsigned load alias: gen_op_ld_T0_A0 already zero-extends */
static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_T0_A0(idx);
}
743

    
744
static inline void gen_op_ld_T1_A0(int idx)
745
{
746
    int mem_index = (idx >> 2) - 1;
747
    switch(idx & 3) {
748
    case 0:
749
        tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
750
        break;
751
    case 1:
752
        tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
753
        break;
754
    case 2:
755
        tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
756
        break;
757
    default:
758
    case 3:
759
        tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
760
        break;
761
    }
762
}
763

    
764
static inline void gen_op_st_T0_A0(int idx)
765
{
766
    int mem_index = (idx >> 2) - 1;
767
    switch(idx & 3) {
768
    case 0:
769
        tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
770
        break;
771
    case 1:
772
        tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
773
        break;
774
    case 2:
775
        tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
776
        break;
777
    default:
778
    case 3:
779
        tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
780
        break;
781
    }
782
}
783

    
784
static inline void gen_op_st_T1_A0(int idx)
785
{
786
    int mem_index = (idx >> 2) - 1;
787
    switch(idx & 3) {
788
    case 0:
789
        tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
790
        break;
791
    case 1:
792
        tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
793
        break;
794
    case 2:
795
        tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
796
        break;
797
    default:
798
    case 3:
799
        tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
800
        break;
801
    }
802
}
803

    
804
/* EIP <- pc (store an immediate instruction pointer into the CPU state) */
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
809

    
810
/* Compute the string-op source address into A0: seg:ESI, honoring the
   current address size (s->aflag: 2 = 64-bit, 1 = 32-bit, 0 = 16-bit)
   and any segment override (default DS where a segment is applied). */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}
844

    
845
/* Compute the string-op destination address into A0: always ES:EDI
   (the destination segment of string instructions cannot be
   overridden), honoring the current address size. */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
865

    
866
/* Load T0 with the per-iteration increment (+/- element size from the
   direction flag), indexed by operand size */
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
    gen_op_movl_T0_Dshiftb,
    gen_op_movl_T0_Dshiftw,
    gen_op_movl_T0_Dshiftl,
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
};
872

    
873
/* Jump to label if (E/R)CX != 0, indexed by address size (s->aflag) */
static GenOpFunc1 *gen_op_jnz_ecx[3] = {
    gen_op_jnz_ecxw,
    gen_op_jnz_ecxl,
    X86_64_ONLY(gen_op_jnz_ecxq),
};
878

    
879
/* Jump to label if (E/R)CX == 0, indexed by address size (s->aflag) */
static GenOpFunc1 *gen_op_jz_ecx[3] = {
    gen_op_jz_ecxw,
    gen_op_jz_ecxl,
    X86_64_ONLY(gen_op_jz_ecxq),
};
884

    
885
/* Decrement (E/R)CX, indexed by address size (s->aflag) */
static GenOpFunc *gen_op_dec_ECX[3] = {
    gen_op_decw_ECX,
    gen_op_decl_ECX,
    X86_64_ONLY(gen_op_decq_ECX),
};
890

    
891
/* REPZ/REPNZ termination test after SCAS/CMPS: row 0 jumps while the
   compare result is non-zero (repz), row 1 while it is zero (repnz);
   columns are the operand size */
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};
905

    
906
/* I/O port read helpers, indexed by operand size (b/w/l) */
static void *helper_in_func[3] = {
    helper_inb,
    helper_inw,
    helper_inl,
};
911

    
912
/* I/O port write helpers, indexed by operand size (b/w/l) */
static void *helper_out_func[3] = {
    helper_outb,
    helper_outw,
    helper_outl,
};
917

    
918
/* I/O permission check helpers, indexed by operand size (b/w/l) */
static void *gen_check_io_func[3] = {
    helper_check_iob,
    helper_check_iow,
    helper_check_iol,
};
923

    
924
/* Emit the permission checks needed before an IN/OUT of size 'ot' at
   cur_eip: the protected-mode I/O permission check when CPL > IOPL or
   in vm86 mode (via the check_io helper, with the port number taken
   from T0), and the SVM IOIO intercept check when that intercept bit
   is set in s->flags.  CPU state (cc_op, EIP) is saved first so any
   exception raised by a helper sees a consistent state. */
static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
                           cpu_tmp2);
    }
    if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
        if (!state_saved) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(cur_eip);
            state_saved = 1;
        }
        /* encode the access size into the SVM exit-info flags */
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           cpu_tmp2,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));
    }
}
956

    
957
/* One MOVS iteration: copy an element from seg:ESI to ES:EDI, then
   advance ESI and EDI by the direction-flag shift value (loaded into
   T0 by gen_op_movl_T0_Dshift) at the current address size. */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
978

    
979
/* Flush the lazily tracked CC operation to env->cc_op and mark it
   dynamic, so subsequently generated code cannot assume a static one */
static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}
986

    
987
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
/* Emit the CX==0 exit test for a REP string op: if (E/R)CX is zero,
   fall through to a chained jump to next_eip; otherwise continue with
   the string body.  Returns the label (l2) the REP loop can branch to
   in order to leave the loop. */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
1001

    
1002
/* One STOS iteration: store EAX (element of size ot) to ES:EDI, then
   advance EDI by the direction-flag shift value. */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1019

    
1020
/* One LODS iteration: load an element from seg:ESI into EAX, then
   advance ESI by the direction-flag shift value. */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1037

    
1038
/* One SCAS iteration: compare EAX with the element at ES:EDI (setting
   the CC state via the cmp micro-op), then advance EDI. */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1056

    
1057
/* One CMPS iteration: compare the element at seg:ESI with the one at
   ES:EDI (setting the CC state), then advance both ESI and EDI. */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1079

    
1080
/* One INS iteration: read a port (DX) into the element at ES:EDI,
   then advance EDI.  A zero is stored to the destination before the
   I/O read -- presumably to take any write fault before performing
   the (non-restartable) port access; confirm against the helpers. */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2, cpu_tmp2, 0xffff);
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1102

    
1103
static inline void gen_outs(DisasContext *s, int ot)
1104
{
1105
    gen_string_movl_A0_ESI(s);
1106
    gen_op_ld_T0_A0(ot + s->mem_index);
1107

    
1108
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1109
    tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[1]);
1110
    tcg_gen_andi_i32(cpu_tmp2, cpu_tmp2, 0xffff);
1111
    tcg_gen_trunc_tl_i32(cpu_tmp3, cpu_T[0]);
1112
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2, cpu_tmp3);
1113

    
1114
    gen_op_movl_T0_Dshift[ot]();
1115
#ifdef TARGET_X86_64
1116
    if (s->aflag == 2) {
1117
        gen_op_addq_ESI_T0();
1118
    } else
1119
#endif
1120
    if (s->aflag) {
1121
        gen_op_addl_ESI_T0();
1122
    } else {
1123
        gen_op_addw_ESI_T0();
1124
    }
1125
}
/* same method as Valgrind : we generate jumps to current or next
   instruction */
/* Expands to a gen_repz_<op>() that wraps one string-op iteration with
   the REP prefix machinery: skip if ECX == 0, run one iteration,
   decrement ECX and loop back to cur_eip. */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
/* Like GEN_REPZ but for SCAS/CMPS, which also terminate on the ZF
   condition selected by 'nz' (REPZ vs REPNZ). */
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);\
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
1162

    
1163
GEN_REPZ(movs)
1164
GEN_REPZ(stos)
1165
GEN_REPZ(lods)
1166
GEN_REPZ(ins)
1167
GEN_REPZ(outs)
1168
GEN_REPZ2(scas)
1169
GEN_REPZ2(cmps)
/* Base condition codes for Jcc/SETcc; bit 0 of the opcode selects the
   inverted form, so only the 8 non-inverted conditions are listed. */
enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};
1181

    
1182
static GenOpFunc1 *gen_jcc_sub[4][8] = {
1183
    [OT_BYTE] = {
1184
        NULL,
1185
        gen_op_jb_subb,
1186
        gen_op_jz_subb,
1187
        gen_op_jbe_subb,
1188
        gen_op_js_subb,
1189
        NULL,
1190
        gen_op_jl_subb,
1191
        gen_op_jle_subb,
1192
    },
1193
    [OT_WORD] = {
1194
        NULL,
1195
        gen_op_jb_subw,
1196
        gen_op_jz_subw,
1197
        gen_op_jbe_subw,
1198
        gen_op_js_subw,
1199
        NULL,
1200
        gen_op_jl_subw,
1201
        gen_op_jle_subw,
1202
    },
1203
    [OT_LONG] = {
1204
        NULL,
1205
        gen_op_jb_subl,
1206
        gen_op_jz_subl,
1207
        gen_op_jbe_subl,
1208
        gen_op_js_subl,
1209
        NULL,
1210
        gen_op_jl_subl,
1211
        gen_op_jle_subl,
1212
    },
1213
#ifdef TARGET_X86_64
1214
    [OT_QUAD] = {
1215
        NULL,
1216
        BUGGY_64(gen_op_jb_subq),
1217
        gen_op_jz_subq,
1218
        BUGGY_64(gen_op_jbe_subq),
1219
        gen_op_js_subq,
1220
        NULL,
1221
        BUGGY_64(gen_op_jl_subq),
1222
        BUGGY_64(gen_op_jle_subq),
1223
    },
1224
#endif
1225
};
1226
static GenOpFunc1 *gen_op_loop[3][4] = {
1227
    [0] = {
1228
        gen_op_loopnzw,
1229
        gen_op_loopzw,
1230
        gen_op_jnz_ecxw,
1231
    },
1232
    [1] = {
1233
        gen_op_loopnzl,
1234
        gen_op_loopzl,
1235
        gen_op_jnz_ecxl,
1236
    },
1237
#ifdef TARGET_X86_64
1238
    [2] = {
1239
        gen_op_loopnzq,
1240
        gen_op_loopzq,
1241
        gen_op_jnz_ecxq,
1242
    },
1243
#endif
1244
};
1245

    
1246
static GenOpFunc *gen_setcc_slow[8] = {
1247
    gen_op_seto_T0_cc,
1248
    gen_op_setb_T0_cc,
1249
    gen_op_setz_T0_cc,
1250
    gen_op_setbe_T0_cc,
1251
    gen_op_sets_T0_cc,
1252
    gen_op_setp_T0_cc,
1253
    gen_op_setl_T0_cc,
1254
    gen_op_setle_T0_cc,
1255
};
1256

    
1257
static GenOpFunc *gen_setcc_sub[4][8] = {
1258
    [OT_BYTE] = {
1259
        NULL,
1260
        gen_op_setb_T0_subb,
1261
        gen_op_setz_T0_subb,
1262
        gen_op_setbe_T0_subb,
1263
        gen_op_sets_T0_subb,
1264
        NULL,
1265
        gen_op_setl_T0_subb,
1266
        gen_op_setle_T0_subb,
1267
    },
1268
    [OT_WORD] = {
1269
        NULL,
1270
        gen_op_setb_T0_subw,
1271
        gen_op_setz_T0_subw,
1272
        gen_op_setbe_T0_subw,
1273
        gen_op_sets_T0_subw,
1274
        NULL,
1275
        gen_op_setl_T0_subw,
1276
        gen_op_setle_T0_subw,
1277
    },
1278
    [OT_LONG] = {
1279
        NULL,
1280
        gen_op_setb_T0_subl,
1281
        gen_op_setz_T0_subl,
1282
        gen_op_setbe_T0_subl,
1283
        gen_op_sets_T0_subl,
1284
        NULL,
1285
        gen_op_setl_T0_subl,
1286
        gen_op_setle_T0_subl,
1287
    },
1288
#ifdef TARGET_X86_64
1289
    [OT_QUAD] = {
1290
        NULL,
1291
        gen_op_setb_T0_subq,
1292
        gen_op_setz_T0_subq,
1293
        gen_op_setbe_T0_subq,
1294
        gen_op_sets_T0_subq,
1295
        NULL,
1296
        gen_op_setl_T0_subq,
1297
        gen_op_setle_T0_subq,
1298
    },
1299
#endif
1300
};
1301

    
1302
static void *helper_fp_arith_ST0_FT0[8] = {
1303
    helper_fadd_ST0_FT0,
1304
    helper_fmul_ST0_FT0,
1305
    helper_fcom_ST0_FT0,
1306
    helper_fcom_ST0_FT0,
1307
    helper_fsub_ST0_FT0,
1308
    helper_fsubr_ST0_FT0,
1309
    helper_fdiv_ST0_FT0,
1310
    helper_fdivr_ST0_FT0,
1311
};
1312

    
1313
/* NOTE the exception in "r" op ordering */
1314
static void *helper_fp_arith_STN_ST0[8] = {
1315
    helper_fadd_STN_ST0,
1316
    helper_fmul_STN_ST0,
1317
    NULL,
1318
    NULL,
1319
    helper_fsubr_STN_ST0,
1320
    helper_fsub_STN_ST0,
1321
    helper_fdivr_STN_ST0,
1322
    helper_fdiv_STN_ST0,
1323
};
1324

    
1325
/* if d == OR_TMP0, it means memory operand (address in A0) */
1326
static void gen_op(DisasContext *s1, int op, int ot, int d)
1327
{
1328
    GenOpFunc *gen_update_cc;
1329

    
1330
    if (d != OR_TMP0) {
1331
        gen_op_mov_TN_reg(ot, 0, d);
1332
    } else {
1333
        gen_op_ld_T0_A0(ot + s1->mem_index);
1334
    }
1335
    switch(op) {
1336
    case OP_ADCL:
1337
    case OP_SBBL:
1338
        if (s1->cc_op != CC_OP_DYNAMIC)
1339
            gen_op_set_cc_op(s1->cc_op);
1340
        if (d != OR_TMP0) {
1341
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
1342
            gen_op_mov_reg_T0(ot, d);
1343
        } else {
1344
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
1345
        }
1346
        s1->cc_op = CC_OP_DYNAMIC;
1347
        goto the_end;
1348
    case OP_ADDL:
1349
        gen_op_addl_T0_T1();
1350
        s1->cc_op = CC_OP_ADDB + ot;
1351
        gen_update_cc = gen_op_update2_cc;
1352
        break;
1353
    case OP_SUBL:
1354
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1355
        s1->cc_op = CC_OP_SUBB + ot;
1356
        gen_update_cc = gen_op_update2_cc;
1357
        break;
1358
    default:
1359
    case OP_ANDL:
1360
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1361
        s1->cc_op = CC_OP_LOGICB + ot;
1362
        gen_update_cc = gen_op_update1_cc;
1363
        break;
1364
    case OP_ORL:
1365
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1366
        s1->cc_op = CC_OP_LOGICB + ot;
1367
        gen_update_cc = gen_op_update1_cc;
1368
        break;
1369
    case OP_XORL:
1370
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1371
        s1->cc_op = CC_OP_LOGICB + ot;
1372
        gen_update_cc = gen_op_update1_cc;
1373
        break;
1374
    case OP_CMPL:
1375
        gen_op_cmpl_T0_T1_cc();
1376
        s1->cc_op = CC_OP_SUBB + ot;
1377
        gen_update_cc = NULL;
1378
        break;
1379
    }
1380
    if (op != OP_CMPL) {
1381
        if (d != OR_TMP0)
1382
            gen_op_mov_reg_T0(ot, d);
1383
        else
1384
            gen_op_st_T0_A0(ot + s1->mem_index);
1385
    }
1386
    /* the flags update must happen after the memory write (precise
1387
       exception support) */
1388
    if (gen_update_cc)
1389
        gen_update_cc();
1390
 the_end: ;
1391
}
1392

    
1393
/* if d == OR_TMP0, it means memory operand (address in A0) */
1394
static void gen_inc(DisasContext *s1, int ot, int d, int c)
1395
{
1396
    if (d != OR_TMP0)
1397
        gen_op_mov_TN_reg(ot, 0, d);
1398
    else
1399
        gen_op_ld_T0_A0(ot + s1->mem_index);
1400
    if (s1->cc_op != CC_OP_DYNAMIC)
1401
        gen_op_set_cc_op(s1->cc_op);
1402
    if (c > 0) {
1403
        gen_op_incl_T0();
1404
        s1->cc_op = CC_OP_INCB + ot;
1405
    } else {
1406
        gen_op_decl_T0();
1407
        s1->cc_op = CC_OP_DECB + ot;
1408
    }
1409
    if (d != OR_TMP0)
1410
        gen_op_mov_reg_T0(ot, d);
1411
    else
1412
        gen_op_st_T0_A0(ot + s1->mem_index);
1413
    gen_op_update_inc_cc();
1414
}
1415

    
1416
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1417
{
1418
    if (d != OR_TMP0)
1419
        gen_op_mov_TN_reg(ot, 0, d);
1420
    else
1421
        gen_op_ld_T0_A0(ot + s1->mem_index);
1422
    if (s != OR_TMP1)
1423
        gen_op_mov_TN_reg(ot, 1, s);
1424
    /* for zero counts, flags are not updated, so must do it dynamically */
1425
    if (s1->cc_op != CC_OP_DYNAMIC)
1426
        gen_op_set_cc_op(s1->cc_op);
1427

    
1428
    if (d != OR_TMP0)
1429
        gen_op_shift_T0_T1_cc[ot][op]();
1430
    else
1431
        gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
1432
    if (d != OR_TMP0)
1433
        gen_op_mov_reg_T0(ot, d);
1434
    s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1435
}
1436

    
1437
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
1438
{
1439
    /* currently not optimized */
1440
    gen_op_movl_T1_im(c);
1441
    gen_shift(s1, op, ot, d, OR_TMP1);
1442
}
1443

    
1444
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1445
{
1446
    target_long disp;
1447
    int havesib;
1448
    int base;
1449
    int index;
1450
    int scale;
1451
    int opreg;
1452
    int mod, rm, code, override, must_add_seg;
1453

    
1454
    override = s->override;
1455
    must_add_seg = s->addseg;
1456
    if (override >= 0)
1457
        must_add_seg = 1;
1458
    mod = (modrm >> 6) & 3;
1459
    rm = modrm & 7;
1460

    
1461
    if (s->aflag) {
1462

    
1463
        havesib = 0;
1464
        base = rm;
1465
        index = 0;
1466
        scale = 0;
1467

    
1468
        if (base == 4) {
1469
            havesib = 1;
1470
            code = ldub_code(s->pc++);
1471
            scale = (code >> 6) & 3;
1472
            index = ((code >> 3) & 7) | REX_X(s);
1473
            base = (code & 7);
1474
        }
1475
        base |= REX_B(s);
1476

    
1477
        switch (mod) {
1478
        case 0:
1479
            if ((base & 7) == 5) {
1480
                base = -1;
1481
                disp = (int32_t)ldl_code(s->pc);
1482
                s->pc += 4;
1483
                if (CODE64(s) && !havesib) {
1484
                    disp += s->pc + s->rip_offset;
1485
                }
1486
            } else {
1487
                disp = 0;
1488
            }
1489
            break;
1490
        case 1:
1491
            disp = (int8_t)ldub_code(s->pc++);
1492
            break;
1493
        default:
1494
        case 2:
1495
            disp = ldl_code(s->pc);
1496
            s->pc += 4;
1497
            break;
1498
        }
1499

    
1500
        if (base >= 0) {
1501
            /* for correct popl handling with esp */
1502
            if (base == 4 && s->popl_esp_hack)
1503
                disp += s->popl_esp_hack;
1504
#ifdef TARGET_X86_64
1505
            if (s->aflag == 2) {
1506
                gen_op_movq_A0_reg(base);
1507
                if (disp != 0) {
1508
                    gen_op_addq_A0_im(disp);
1509
                }
1510
            } else
1511
#endif
1512
            {
1513
                gen_op_movl_A0_reg(base);
1514
                if (disp != 0)
1515
                    gen_op_addl_A0_im(disp);
1516
            }
1517
        } else {
1518
#ifdef TARGET_X86_64
1519
            if (s->aflag == 2) {
1520
                gen_op_movq_A0_im(disp);
1521
            } else
1522
#endif
1523
            {
1524
                gen_op_movl_A0_im(disp);
1525
            }
1526
        }
1527
        /* XXX: index == 4 is always invalid */
1528
        if (havesib && (index != 4 || scale != 0)) {
1529
#ifdef TARGET_X86_64
1530
            if (s->aflag == 2) {
1531
                gen_op_addq_A0_reg_sN(scale, index);
1532
            } else
1533
#endif
1534
            {
1535
                gen_op_addl_A0_reg_sN(scale, index);
1536
            }
1537
        }
1538
        if (must_add_seg) {
1539
            if (override < 0) {
1540
                if (base == R_EBP || base == R_ESP)
1541
                    override = R_SS;
1542
                else
1543
                    override = R_DS;
1544
            }
1545
#ifdef TARGET_X86_64
1546
            if (s->aflag == 2) {
1547
                gen_op_addq_A0_seg(override);
1548
            } else
1549
#endif
1550
            {
1551
                gen_op_addl_A0_seg(override);
1552
            }
1553
        }
1554
    } else {
1555
        switch (mod) {
1556
        case 0:
1557
            if (rm == 6) {
1558
                disp = lduw_code(s->pc);
1559
                s->pc += 2;
1560
                gen_op_movl_A0_im(disp);
1561
                rm = 0; /* avoid SS override */
1562
                goto no_rm;
1563
            } else {
1564
                disp = 0;
1565
            }
1566
            break;
1567
        case 1:
1568
            disp = (int8_t)ldub_code(s->pc++);
1569
            break;
1570
        default:
1571
        case 2:
1572
            disp = lduw_code(s->pc);
1573
            s->pc += 2;
1574
            break;
1575
        }
1576
        switch(rm) {
1577
        case 0:
1578
            gen_op_movl_A0_reg(R_EBX);
1579
            gen_op_addl_A0_reg_sN(0, R_ESI);
1580
            break;
1581
        case 1:
1582
            gen_op_movl_A0_reg(R_EBX);
1583
            gen_op_addl_A0_reg_sN(0, R_EDI);
1584
            break;
1585
        case 2:
1586
            gen_op_movl_A0_reg(R_EBP);
1587
            gen_op_addl_A0_reg_sN(0, R_ESI);
1588
            break;
1589
        case 3:
1590
            gen_op_movl_A0_reg(R_EBP);
1591
            gen_op_addl_A0_reg_sN(0, R_EDI);
1592
            break;
1593
        case 4:
1594
            gen_op_movl_A0_reg(R_ESI);
1595
            break;
1596
        case 5:
1597
            gen_op_movl_A0_reg(R_EDI);
1598
            break;
1599
        case 6:
1600
            gen_op_movl_A0_reg(R_EBP);
1601
            break;
1602
        default:
1603
        case 7:
1604
            gen_op_movl_A0_reg(R_EBX);
1605
            break;
1606
        }
1607
        if (disp != 0)
1608
            gen_op_addl_A0_im(disp);
1609
        gen_op_andl_A0_ffff();
1610
    no_rm:
1611
        if (must_add_seg) {
1612
            if (override < 0) {
1613
                if (rm == 2 || rm == 3 || rm == 6)
1614
                    override = R_SS;
1615
                else
1616
                    override = R_DS;
1617
            }
1618
            gen_op_addl_A0_seg(override);
1619
        }
1620
    }
1621

    
1622
    opreg = OR_A0;
1623
    disp = 0;
1624
    *reg_ptr = opreg;
1625
    *offset_ptr = disp;
1626
}
1627

    
1628
static void gen_nop_modrm(DisasContext *s, int modrm)
1629
{
1630
    int mod, rm, base, code;
1631

    
1632
    mod = (modrm >> 6) & 3;
1633
    if (mod == 3)
1634
        return;
1635
    rm = modrm & 7;
1636

    
1637
    if (s->aflag) {
1638

    
1639
        base = rm;
1640

    
1641
        if (base == 4) {
1642
            code = ldub_code(s->pc++);
1643
            base = (code & 7);
1644
        }
1645

    
1646
        switch (mod) {
1647
        case 0:
1648
            if (base == 5) {
1649
                s->pc += 4;
1650
            }
1651
            break;
1652
        case 1:
1653
            s->pc++;
1654
            break;
1655
        default:
1656
        case 2:
1657
            s->pc += 4;
1658
            break;
1659
        }
1660
    } else {
1661
        switch (mod) {
1662
        case 0:
1663
            if (rm == 6) {
1664
                s->pc += 2;
1665
            }
1666
            break;
1667
        case 1:
1668
            s->pc++;
1669
            break;
1670
        default:
1671
        case 2:
1672
            s->pc += 2;
1673
            break;
1674
        }
1675
    }
1676
}
1677

    
1678
/* used for LEA and MOV AX, mem */
1679
static void gen_add_A0_ds_seg(DisasContext *s)
1680
{
1681
    int override, must_add_seg;
1682
    must_add_seg = s->addseg;
1683
    override = R_DS;
1684
    if (s->override >= 0) {
1685
        override = s->override;
1686
        must_add_seg = 1;
1687
    } else {
1688
        override = R_DS;
1689
    }
1690
    if (must_add_seg) {
1691
#ifdef TARGET_X86_64
1692
        if (CODE64(s)) {
1693
            gen_op_addq_A0_seg(override);
1694
        } else
1695
#endif
1696
        {
1697
            gen_op_addl_A0_seg(override);
1698
        }
1699
    }
1700
}
1701

    
1702
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1703
   OR_TMP0 */
1704
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1705
{
1706
    int mod, rm, opreg, disp;
1707

    
1708
    mod = (modrm >> 6) & 3;
1709
    rm = (modrm & 7) | REX_B(s);
1710
    if (mod == 3) {
1711
        if (is_store) {
1712
            if (reg != OR_TMP0)
1713
                gen_op_mov_TN_reg(ot, 0, reg);
1714
            gen_op_mov_reg_T0(ot, rm);
1715
        } else {
1716
            gen_op_mov_TN_reg(ot, 0, rm);
1717
            if (reg != OR_TMP0)
1718
                gen_op_mov_reg_T0(ot, reg);
1719
        }
1720
    } else {
1721
        gen_lea_modrm(s, modrm, &opreg, &disp);
1722
        if (is_store) {
1723
            if (reg != OR_TMP0)
1724
                gen_op_mov_TN_reg(ot, 0, reg);
1725
            gen_op_st_T0_A0(ot + s->mem_index);
1726
        } else {
1727
            gen_op_ld_T0_A0(ot + s->mem_index);
1728
            if (reg != OR_TMP0)
1729
                gen_op_mov_reg_T0(ot, reg);
1730
        }
1731
    }
1732
}
1733

    
1734
static inline uint32_t insn_get(DisasContext *s, int ot)
1735
{
1736
    uint32_t ret;
1737

    
1738
    switch(ot) {
1739
    case OT_BYTE:
1740
        ret = ldub_code(s->pc);
1741
        s->pc++;
1742
        break;
1743
    case OT_WORD:
1744
        ret = lduw_code(s->pc);
1745
        s->pc += 2;
1746
        break;
1747
    default:
1748
    case OT_LONG:
1749
        ret = ldl_code(s->pc);
1750
        s->pc += 4;
1751
        break;
1752
    }
1753
    return ret;
1754
}
1755

    
1756
static inline int insn_const_size(unsigned int ot)
1757
{
1758
    if (ot <= OT_LONG)
1759
        return 1 << ot;
1760
    else
1761
        return 4;
1762
}
1763

    
1764
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
1765
{
1766
    TranslationBlock *tb;
1767
    target_ulong pc;
1768

    
1769
    pc = s->cs_base + eip;
1770
    tb = s->tb;
1771
    /* NOTE: we handle the case where the TB spans two pages here */
1772
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
1773
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))  {
1774
        /* jump to same page: we can use a direct jump */
1775
        tcg_gen_goto_tb(tb_num);
1776
        gen_jmp_im(eip);
1777
        tcg_gen_exit_tb((long)tb + tb_num);
1778
    } else {
1779
        /* jump to another page: currently not optimized */
1780
        gen_jmp_im(eip);
1781
        gen_eob(s);
1782
    }
1783
}
1784

    
1785
static inline void gen_jcc(DisasContext *s, int b,
1786
                           target_ulong val, target_ulong next_eip)
1787
{
1788
    TranslationBlock *tb;
1789
    int inv, jcc_op;
1790
    GenOpFunc1 *func;
1791
    target_ulong tmp;
1792
    int l1, l2;
1793

    
1794
    inv = b & 1;
1795
    jcc_op = (b >> 1) & 7;
1796

    
1797
    if (s->jmp_opt) {
1798
        switch(s->cc_op) {
1799
            /* we optimize the cmp/jcc case */
1800
        case CC_OP_SUBB:
1801
        case CC_OP_SUBW:
1802
        case CC_OP_SUBL:
1803
        case CC_OP_SUBQ:
1804
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1805
            break;
1806

    
1807
            /* some jumps are easy to compute */
1808
        case CC_OP_ADDB:
1809
        case CC_OP_ADDW:
1810
        case CC_OP_ADDL:
1811
        case CC_OP_ADDQ:
1812

    
1813
        case CC_OP_ADCB:
1814
        case CC_OP_ADCW:
1815
        case CC_OP_ADCL:
1816
        case CC_OP_ADCQ:
1817

    
1818
        case CC_OP_SBBB:
1819
        case CC_OP_SBBW:
1820
        case CC_OP_SBBL:
1821
        case CC_OP_SBBQ:
1822

    
1823
        case CC_OP_LOGICB:
1824
        case CC_OP_LOGICW:
1825
        case CC_OP_LOGICL:
1826
        case CC_OP_LOGICQ:
1827

    
1828
        case CC_OP_INCB:
1829
        case CC_OP_INCW:
1830
        case CC_OP_INCL:
1831
        case CC_OP_INCQ:
1832

    
1833
        case CC_OP_DECB:
1834
        case CC_OP_DECW:
1835
        case CC_OP_DECL:
1836
        case CC_OP_DECQ:
1837

    
1838
        case CC_OP_SHLB:
1839
        case CC_OP_SHLW:
1840
        case CC_OP_SHLL:
1841
        case CC_OP_SHLQ:
1842

    
1843
        case CC_OP_SARB:
1844
        case CC_OP_SARW:
1845
        case CC_OP_SARL:
1846
        case CC_OP_SARQ:
1847
            switch(jcc_op) {
1848
            case JCC_Z:
1849
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1850
                break;
1851
            case JCC_S:
1852
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1853
                break;
1854
            default:
1855
                func = NULL;
1856
                break;
1857
            }
1858
            break;
1859
        default:
1860
            func = NULL;
1861
            break;
1862
        }
1863

    
1864
        if (s->cc_op != CC_OP_DYNAMIC) {
1865
            gen_op_set_cc_op(s->cc_op);
1866
            s->cc_op = CC_OP_DYNAMIC;
1867
        }
1868

    
1869
        if (!func) {
1870
            gen_setcc_slow[jcc_op]();
1871
            func = gen_op_jnz_T0_label;
1872
        }
1873

    
1874
        if (inv) {
1875
            tmp = val;
1876
            val = next_eip;
1877
            next_eip = tmp;
1878
        }
1879
        tb = s->tb;
1880

    
1881
        l1 = gen_new_label();
1882
        func(l1);
1883

    
1884
        gen_goto_tb(s, 0, next_eip);
1885

    
1886
        gen_set_label(l1);
1887
        gen_goto_tb(s, 1, val);
1888

    
1889
        s->is_jmp = 3;
1890
    } else {
1891

    
1892
        if (s->cc_op != CC_OP_DYNAMIC) {
1893
            gen_op_set_cc_op(s->cc_op);
1894
            s->cc_op = CC_OP_DYNAMIC;
1895
        }
1896
        gen_setcc_slow[jcc_op]();
1897
        if (inv) {
1898
            tmp = val;
1899
            val = next_eip;
1900
            next_eip = tmp;
1901
        }
1902
        l1 = gen_new_label();
1903
        l2 = gen_new_label();
1904
        gen_op_jnz_T0_label(l1);
1905
        gen_jmp_im(next_eip);
1906
        gen_op_jmp_label(l2);
1907
        gen_set_label(l1);
1908
        gen_jmp_im(val);
1909
        gen_set_label(l2);
1910
        gen_eob(s);
1911
    }
1912
}
1913

    
1914
static void gen_setcc(DisasContext *s, int b)
1915
{
1916
    int inv, jcc_op;
1917
    GenOpFunc *func;
1918

    
1919
    inv = b & 1;
1920
    jcc_op = (b >> 1) & 7;
1921
    switch(s->cc_op) {
1922
        /* we optimize the cmp/jcc case */
1923
    case CC_OP_SUBB:
1924
    case CC_OP_SUBW:
1925
    case CC_OP_SUBL:
1926
    case CC_OP_SUBQ:
1927
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1928
        if (!func)
1929
            goto slow_jcc;
1930
        break;
1931

    
1932
        /* some jumps are easy to compute */
1933
    case CC_OP_ADDB:
1934
    case CC_OP_ADDW:
1935
    case CC_OP_ADDL:
1936
    case CC_OP_ADDQ:
1937

    
1938
    case CC_OP_LOGICB:
1939
    case CC_OP_LOGICW:
1940
    case CC_OP_LOGICL:
1941
    case CC_OP_LOGICQ:
1942

    
1943
    case CC_OP_INCB:
1944
    case CC_OP_INCW:
1945
    case CC_OP_INCL:
1946
    case CC_OP_INCQ:
1947

    
1948
    case CC_OP_DECB:
1949
    case CC_OP_DECW:
1950
    case CC_OP_DECL:
1951
    case CC_OP_DECQ:
1952

    
1953
    case CC_OP_SHLB:
1954
    case CC_OP_SHLW:
1955
    case CC_OP_SHLL:
1956
    case CC_OP_SHLQ:
1957
        switch(jcc_op) {
1958
        case JCC_Z:
1959
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1960
            break;
1961
        case JCC_S:
1962
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1963
            break;
1964
        default:
1965
            goto slow_jcc;
1966
        }
1967
        break;
1968
    default:
1969
    slow_jcc:
1970
        if (s->cc_op != CC_OP_DYNAMIC)
1971
            gen_op_set_cc_op(s->cc_op);
1972
        func = gen_setcc_slow[jcc_op];
1973
        break;
1974
    }
1975
    func();
1976
    if (inv) {
1977
        gen_op_xor_T0_1();
1978
    }
1979
}
1980

    
1981
/* move T0 to seg_reg and compute if the CPU state may change. Never
1982
   call this function with seg_reg == R_CS */
1983
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
1984
{
1985
    if (s->pe && !s->vm86) {
1986
        /* XXX: optimize by finding processor state dynamically */
1987
        if (s->cc_op != CC_OP_DYNAMIC)
1988
            gen_op_set_cc_op(s->cc_op);
1989
        gen_jmp_im(cur_eip);
1990
        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
1991
        tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2);
1992
        /* abort translation because the addseg value may change or
1993
           because ss32 may change. For R_SS, translation must always
1994
           stop as a special handling must be done to disable hardware
1995
           interrupts for the next instruction */
1996
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
1997
            s->is_jmp = 3;
1998
    } else {
1999
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
2000
        if (seg_reg == R_SS)
2001
            s->is_jmp = 3;
2002
    }
2003
}
2004

    
2005
static inline int svm_is_rep(int prefixes)
2006
{
2007
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2008
}
2009

    
2010
static inline int
2011
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2012
                              uint32_t type, uint64_t param)
2013
{
2014
    if(!(s->flags & (INTERCEPT_SVM_MASK)))
2015
        /* no SVM activated */
2016
        return 0;
2017
    switch(type) {
2018
        /* CRx and DRx reads/writes */
2019
        case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
2020
            if (s->cc_op != CC_OP_DYNAMIC) {
2021
                gen_op_set_cc_op(s->cc_op);
2022
            }
2023
            gen_jmp_im(pc_start - s->cs_base);
2024
            tcg_gen_helper_0_2(helper_svm_check_intercept_param, 
2025
                               tcg_const_i32(type), tcg_const_i64(param));
2026
            /* this is a special case as we do not know if the interception occurs
2027
               so we assume there was none */
2028
            return 0;
2029
        case SVM_EXIT_MSR:
2030
            if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
2031
                if (s->cc_op != CC_OP_DYNAMIC) {
2032
                    gen_op_set_cc_op(s->cc_op);
2033
                }
2034
                gen_jmp_im(pc_start - s->cs_base);
2035
                tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2036
                                   tcg_const_i32(type), tcg_const_i64(param));
2037
                /* this is a special case as we do not know if the interception occurs
2038
                   so we assume there was none */
2039
                return 0;
2040
            }
2041
            break;
2042
        default:
2043
            if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
2044
                if (s->cc_op != CC_OP_DYNAMIC) {
2045
                    gen_op_set_cc_op(s->cc_op);
2046
                }
2047
                gen_jmp_im(pc_start - s->cs_base);
2048
                tcg_gen_helper_0_2(helper_vmexit,
2049
                                   tcg_const_i32(type), tcg_const_i64(param));
2050
                /* we can optimize this one so TBs don't get longer
2051
                   than up to vmexit */
2052
                gen_eob(s);
2053
                return 1;
2054
            }
2055
    }
2056
    return 0;
2057
}
2058

    
2059
static inline int
2060
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2061
{
2062
    return gen_svm_check_intercept_param(s, pc_start, type, 0);
2063
}
2064

    
2065
static inline void gen_stack_update(DisasContext *s, int addend)
2066
{
2067
#ifdef TARGET_X86_64
2068
    if (CODE64(s)) {
2069
        gen_op_addq_ESP_im(addend);
2070
    } else
2071
#endif
2072
    if (s->ss32) {
2073
        gen_op_addl_ESP_im(addend);
2074
    } else {
2075
        gen_op_addw_ESP_im(addend);
2076
    }
2077
}
2078

    
2079
/* generate a push. It depends on ss32, addseg and dflag */
2080
static void gen_push_T0(DisasContext *s)
2081
{
2082
#ifdef TARGET_X86_64
2083
    if (CODE64(s)) {
2084
        gen_op_movq_A0_reg(R_ESP);
2085
        if (s->dflag) {
2086
            gen_op_addq_A0_im(-8);
2087
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2088
        } else {
2089
            gen_op_addq_A0_im(-2);
2090
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
2091
        }
2092
        gen_op_mov_reg_A0(2, R_ESP);
2093
    } else
2094
#endif
2095
    {
2096
        gen_op_movl_A0_reg(R_ESP);
2097
        if (!s->dflag)
2098
            gen_op_addl_A0_im(-2);
2099
        else
2100
            gen_op_addl_A0_im(-4);
2101
        if (s->ss32) {
2102
            if (s->addseg) {
2103
                gen_op_movl_T1_A0();
2104
                gen_op_addl_A0_seg(R_SS);
2105
            }
2106
        } else {
2107
            gen_op_andl_A0_ffff();
2108
            gen_op_movl_T1_A0();
2109
            gen_op_addl_A0_seg(R_SS);
2110
        }
2111
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2112
        if (s->ss32 && !s->addseg)
2113
            gen_op_mov_reg_A0(1, R_ESP);
2114
        else
2115
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2116
    }
2117
}
2118

    
2119
/* generate a push. It depends on ss32, addseg and dflag */
2120
/* slower version for T1, only used for call Ev */
2121
static void gen_push_T1(DisasContext *s)
2122
{
2123
#ifdef TARGET_X86_64
2124
    if (CODE64(s)) {
2125
        gen_op_movq_A0_reg(R_ESP);
2126
        if (s->dflag) {
2127
            gen_op_addq_A0_im(-8);
2128
            gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2129
        } else {
2130
            gen_op_addq_A0_im(-2);
2131
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
2132
        }
2133
        gen_op_mov_reg_A0(2, R_ESP);
2134
    } else
2135
#endif
2136
    {
2137
        gen_op_movl_A0_reg(R_ESP);
2138
        if (!s->dflag)
2139
            gen_op_addl_A0_im(-2);
2140
        else
2141
            gen_op_addl_A0_im(-4);
2142
        if (s->ss32) {
2143
            if (s->addseg) {
2144
                gen_op_addl_A0_seg(R_SS);
2145
            }
2146
        } else {
2147
            gen_op_andl_A0_ffff();
2148
            gen_op_addl_A0_seg(R_SS);
2149
        }
2150
        gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2151

    
2152
        if (s->ss32 && !s->addseg)
2153
            gen_op_mov_reg_A0(1, R_ESP);
2154
        else
2155
            gen_stack_update(s, (-2) << s->dflag);
2156
    }
2157
}
2158

    
2159
/* two step pop is necessary for precise exceptions */
2160
static void gen_pop_T0(DisasContext *s)
2161
{
2162
#ifdef TARGET_X86_64
2163
    if (CODE64(s)) {
2164
        gen_op_movq_A0_reg(R_ESP);
2165
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2166
    } else
2167
#endif
2168
    {
2169
        gen_op_movl_A0_reg(R_ESP);
2170
        if (s->ss32) {
2171
            if (s->addseg)
2172
                gen_op_addl_A0_seg(R_SS);
2173
        } else {
2174
            gen_op_andl_A0_ffff();
2175
            gen_op_addl_A0_seg(R_SS);
2176
        }
2177
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2178
    }
2179
}
2180

    
2181
/* Second half of a pop: bump the stack pointer by the operand size
   (8, 4 or 2 bytes depending on mode and dflag). */
static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}
2192

    
2193
/* Compute the current top-of-stack address into A0 (segmented when
   addseg is set); the raw offset is left in T1. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
2202

    
2203
/* NOTE: wrap around in 16 bit not fully handled */
2204
static void gen_pusha(DisasContext *s)
2205
{
2206
    int i;
2207
    gen_op_movl_A0_reg(R_ESP);
2208
    gen_op_addl_A0_im(-16 <<  s->dflag);
2209
    if (!s->ss32)
2210
        gen_op_andl_A0_ffff();
2211
    gen_op_movl_T1_A0();
2212
    if (s->addseg)
2213
        gen_op_addl_A0_seg(R_SS);
2214
    for(i = 0;i < 8; i++) {
2215
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2216
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2217
        gen_op_addl_A0_im(2 <<  s->dflag);
2218
    }
2219
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2220
}
2221

    
2222
/* NOTE: wrap around in 16 bit not fully handled */
2223
static void gen_popa(DisasContext *s)
2224
{
2225
    int i;
2226
    gen_op_movl_A0_reg(R_ESP);
2227
    if (!s->ss32)
2228
        gen_op_andl_A0_ffff();
2229
    gen_op_movl_T1_A0();
2230
    gen_op_addl_T1_im(16 <<  s->dflag);
2231
    if (s->addseg)
2232
        gen_op_addl_A0_seg(R_SS);
2233
    for(i = 0;i < 8; i++) {
2234
        /* ESP is not reloaded */
2235
        if (i != 3) {
2236
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2237
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2238
        }
2239
        gen_op_addl_A0_im(2 <<  s->dflag);
2240
    }
2241
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2242
}
2243

    
2244
/* ENTER: push EBP, optionally copy 'level' frame pointers (via helper),
   set EBP to the new frame and reserve esp_addend bytes of locals. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;  /* hardware masks the nesting level to 5 bits */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        /* NOTE(review): loads RSP with a 32-bit move before the 64-bit
           add — kept as in the original; confirm against upstream */
        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter64_level,
                               tcg_const_i32(level),
                               tcg_const_i32((ot == OT_QUAD)),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        gen_op_movl_T1_A0();
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter_level,
                               tcg_const_i32(level),
                               tcg_const_i32(s->dflag),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
2299

    
2300
/* Raise CPU exception 'trapno' at cur_eip: flush lazy condition codes,
   set EIP, call the raise helper and end the translation block. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
    s->is_jmp = 3;
}
2308

    
2309
/* an interrupt is different from an exception because of the
2310
   privilege checks */
2311
static void gen_interrupt(DisasContext *s, int intno,
2312
                          target_ulong cur_eip, target_ulong next_eip)
2313
{
2314
    if (s->cc_op != CC_OP_DYNAMIC)
2315
        gen_op_set_cc_op(s->cc_op);
2316
    gen_jmp_im(cur_eip);
2317
    tcg_gen_helper_0_2(helper_raise_interrupt, 
2318
                       tcg_const_i32(intno), 
2319
                       tcg_const_i32(next_eip - cur_eip));
2320
    s->is_jmp = 3;
2321
}
2322

    
2323
/* Enter the debugger at cur_eip: sync condition codes and EIP, call
   the debug helper and end the translation block. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_0(helper_debug);
    s->is_jmp = 3;
}
2331

    
2332
/* generate a generic end of block. Trace exception is also generated
2333
   if needed */
2334
static void gen_eob(DisasContext *s)
2335
{
2336
    if (s->cc_op != CC_OP_DYNAMIC)
2337
        gen_op_set_cc_op(s->cc_op);
2338
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
2339
        tcg_gen_helper_0_0(helper_reset_inhibit_irq);
2340
    }
2341
    if (s->singlestep_enabled) {
2342
        tcg_gen_helper_0_0(helper_debug);
2343
    } else if (s->tf) {
2344
        tcg_gen_helper_0_0(helper_single_step);
2345
    } else {
2346
        tcg_gen_exit_tb(0);
2347
    }
2348
    s->is_jmp = 3;
2349
}
2350

    
2351
/* generate a jump to eip. No segment change must happen before as a
2352
   direct call to the next block may occur */
2353
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2354
{
2355
    if (s->jmp_opt) {
2356
        if (s->cc_op != CC_OP_DYNAMIC) {
2357
            gen_op_set_cc_op(s->cc_op);
2358
            s->cc_op = CC_OP_DYNAMIC;
2359
        }
2360
        gen_goto_tb(s, tb_num, eip);
2361
        s->is_jmp = 3;
2362
    } else {
2363
        gen_jmp_im(eip);
2364
        gen_eob(s);
2365
    }
2366
}
2367

    
2368
/* Jump to eip using TB slot 0 (see gen_jmp_tb). */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2372

    
2373
static inline void gen_ldq_env_A0(int idx, int offset)
2374
{
2375
    int mem_index = (idx >> 2) - 1;
2376
    tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0, mem_index);
2377
    tcg_gen_st_i64(cpu_tmp1, cpu_env, offset);
2378
}
2379

    
2380
static inline void gen_stq_env_A0(int idx, int offset)
2381
{
2382
    int mem_index = (idx >> 2) - 1;
2383
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset);
2384
    tcg_gen_qemu_st64(cpu_tmp1, cpu_A0, mem_index);
2385
}
2386

    
2387
static inline void gen_ldo_env_A0(int idx, int offset)
2388
{
2389
    int mem_index = (idx >> 2) - 1;
2390
    tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0, mem_index);
2391
    tcg_gen_st_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2392
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2393
    tcg_gen_qemu_ld64(cpu_tmp1, cpu_tmp0, mem_index);
2394
    tcg_gen_st_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2395
}
2396

    
2397
static inline void gen_sto_env_A0(int idx, int offset)
2398
{
2399
    int mem_index = (idx >> 2) - 1;
2400
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2401
    tcg_gen_qemu_st64(cpu_tmp1, cpu_A0, mem_index);
2402
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2403
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2404
    tcg_gen_qemu_st64(cpu_tmp1, cpu_tmp0, mem_index);
2405
}
2406

    
2407
static inline void gen_op_movo(int d_offset, int s_offset)
2408
{
2409
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, s_offset);
2410
    tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset);
2411
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, s_offset + 8);
2412
    tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset + 8);
2413
}
2414

    
2415
static inline void gen_op_movq(int d_offset, int s_offset)
2416
{
2417
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, s_offset);
2418
    tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset);
2419
}
2420

    
2421
static inline void gen_op_movl(int d_offset, int s_offset)
2422
{
2423
    tcg_gen_ld_i32(cpu_tmp2, cpu_env, s_offset);
2424
    tcg_gen_st_i32(cpu_tmp2, cpu_env, d_offset);
2425
}
2426

    
2427
static inline void gen_op_movq_env_0(int d_offset)
2428
{
2429
    tcg_gen_movi_i64(cpu_tmp1, 0);
2430
    tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset);
2431
}
2432

    
2433
/* Sentinel values for sse_op_table1: entries that need hand-written
   decode (SSE_SPECIAL) or are handled before table dispatch (SSE_DUMMY). */
#define SSE_SPECIAL ((void *)1)
#define SSE_DUMMY ((void *)2)

/* MMX_OP2: { mmx-variant, xmm-variant }; SSE_FOP: the four FP forms
   { ps, pd, ss, sd } selected by the operand-size/rep prefixes. */
#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
                     helper_ ## x ## ss, helper_ ## x ## sd, }
2439

    
2440
static void *sse_op_table1[256][4] = {
2441
    /* 3DNow! extensions */
2442
    [0x0e] = { SSE_DUMMY }, /* femms */
2443
    [0x0f] = { SSE_DUMMY }, /* pf... */
2444
    /* pure SSE operations */
2445
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2446
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2447
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
2448
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
2449
    [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
2450
    [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
2451
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
2452
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */
2453

    
2454
    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
2455
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
2456
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2457
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
2458
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2459
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2460
    [0x2e] = { helper_ucomiss, helper_ucomisd },
2461
    [0x2f] = { helper_comiss, helper_comisd },
2462
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2463
    [0x51] = SSE_FOP(sqrt),
2464
    [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
2465
    [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
2466
    [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
2467
    [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
2468
    [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
2469
    [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
2470
    [0x58] = SSE_FOP(add),
2471
    [0x59] = SSE_FOP(mul),
2472
    [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
2473
               helper_cvtss2sd, helper_cvtsd2ss },
2474
    [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
2475
    [0x5c] = SSE_FOP(sub),
2476
    [0x5d] = SSE_FOP(min),
2477
    [0x5e] = SSE_FOP(div),
2478
    [0x5f] = SSE_FOP(max),
2479

    
2480
    [0xc2] = SSE_FOP(cmpeq),
2481
    [0xc6] = { helper_shufps, helper_shufpd },
2482

    
2483
    /* MMX ops and their SSE extensions */
2484
    [0x60] = MMX_OP2(punpcklbw),
2485
    [0x61] = MMX_OP2(punpcklwd),
2486
    [0x62] = MMX_OP2(punpckldq),
2487
    [0x63] = MMX_OP2(packsswb),
2488
    [0x64] = MMX_OP2(pcmpgtb),
2489
    [0x65] = MMX_OP2(pcmpgtw),
2490
    [0x66] = MMX_OP2(pcmpgtl),
2491
    [0x67] = MMX_OP2(packuswb),
2492
    [0x68] = MMX_OP2(punpckhbw),
2493
    [0x69] = MMX_OP2(punpckhwd),
2494
    [0x6a] = MMX_OP2(punpckhdq),
2495
    [0x6b] = MMX_OP2(packssdw),
2496
    [0x6c] = { NULL, helper_punpcklqdq_xmm },
2497
    [0x6d] = { NULL, helper_punpckhqdq_xmm },
2498
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2499
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movqdu */
2500
    [0x70] = { helper_pshufw_mmx,
2501
               helper_pshufd_xmm,
2502
               helper_pshufhw_xmm,
2503
               helper_pshuflw_xmm },
2504
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2505
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2506
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2507
    [0x74] = MMX_OP2(pcmpeqb),
2508
    [0x75] = MMX_OP2(pcmpeqw),
2509
    [0x76] = MMX_OP2(pcmpeql),
2510
    [0x77] = { SSE_DUMMY }, /* emms */
2511
    [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
2512
    [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
2513
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
2514
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2515
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
2516
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
2517
    [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
2518
    [0xd1] = MMX_OP2(psrlw),
2519
    [0xd2] = MMX_OP2(psrld),
2520
    [0xd3] = MMX_OP2(psrlq),
2521
    [0xd4] = MMX_OP2(paddq),
2522
    [0xd5] = MMX_OP2(pmullw),
2523
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
2524
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
2525
    [0xd8] = MMX_OP2(psubusb),
2526
    [0xd9] = MMX_OP2(psubusw),
2527
    [0xda] = MMX_OP2(pminub),
2528
    [0xdb] = MMX_OP2(pand),
2529
    [0xdc] = MMX_OP2(paddusb),
2530
    [0xdd] = MMX_OP2(paddusw),
2531
    [0xde] = MMX_OP2(pmaxub),
2532
    [0xdf] = MMX_OP2(pandn),
2533
    [0xe0] = MMX_OP2(pavgb),
2534
    [0xe1] = MMX_OP2(psraw),
2535
    [0xe2] = MMX_OP2(psrad),
2536
    [0xe3] = MMX_OP2(pavgw),
2537
    [0xe4] = MMX_OP2(pmulhuw),
2538
    [0xe5] = MMX_OP2(pmulhw),
2539
    [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
2540
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntq */
2541
    [0xe8] = MMX_OP2(psubsb),
2542
    [0xe9] = MMX_OP2(psubsw),
2543
    [0xea] = MMX_OP2(pminsw),
2544
    [0xeb] = MMX_OP2(por),
2545
    [0xec] = MMX_OP2(paddsb),
2546
    [0xed] = MMX_OP2(paddsw),
2547
    [0xee] = MMX_OP2(pmaxsw),
2548
    [0xef] = MMX_OP2(pxor),
2549
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
2550
    [0xf1] = MMX_OP2(psllw),
2551
    [0xf2] = MMX_OP2(pslld),
2552
    [0xf3] = MMX_OP2(psllq),
2553
    [0xf4] = MMX_OP2(pmuludq),
2554
    [0xf5] = MMX_OP2(pmaddwd),
2555
    [0xf6] = MMX_OP2(psadbw),
2556
    [0xf7] = MMX_OP2(maskmov),
2557
    [0xf8] = MMX_OP2(psubb),
2558
    [0xf9] = MMX_OP2(psubw),
2559
    [0xfa] = MMX_OP2(psubl),
2560
    [0xfb] = MMX_OP2(psubq),
2561
    [0xfc] = MMX_OP2(paddb),
2562
    [0xfd] = MMX_OP2(paddw),
2563
    [0xfe] = MMX_OP2(paddl),
2564
};
2565

    
2566
static void *sse_op_table2[3 * 8][2] = {
2567
    [0 + 2] = MMX_OP2(psrlw),
2568
    [0 + 4] = MMX_OP2(psraw),
2569
    [0 + 6] = MMX_OP2(psllw),
2570
    [8 + 2] = MMX_OP2(psrld),
2571
    [8 + 4] = MMX_OP2(psrad),
2572
    [8 + 6] = MMX_OP2(pslld),
2573
    [16 + 2] = MMX_OP2(psrlq),
2574
    [16 + 3] = { NULL, helper_psrldq_xmm },
2575
    [16 + 6] = MMX_OP2(psllq),
2576
    [16 + 7] = { NULL, helper_pslldq_xmm },
2577
};
2578

    
2579
static void *sse_op_table3[4 * 3] = {
2580
    helper_cvtsi2ss,
2581
    helper_cvtsi2sd,
2582
    X86_64_ONLY(helper_cvtsq2ss),
2583
    X86_64_ONLY(helper_cvtsq2sd),
2584

    
2585
    helper_cvttss2si,
2586
    helper_cvttsd2si,
2587
    X86_64_ONLY(helper_cvttss2sq),
2588
    X86_64_ONLY(helper_cvttsd2sq),
2589

    
2590
    helper_cvtss2si,
2591
    helper_cvtsd2si,
2592
    X86_64_ONLY(helper_cvtss2sq),
2593
    X86_64_ONLY(helper_cvtsd2sq),
2594
};
2595

    
2596
static void *sse_op_table4[8][4] = {
2597
    SSE_FOP(cmpeq),
2598
    SSE_FOP(cmplt),
2599
    SSE_FOP(cmple),
2600
    SSE_FOP(cmpunord),
2601
    SSE_FOP(cmpneq),
2602
    SSE_FOP(cmpnlt),
2603
    SSE_FOP(cmpnle),
2604
    SSE_FOP(cmpord),
2605
};
2606

    
2607
/* 3DNow! operations, indexed by the trailing imm8 opcode byte. */
static void *sse_op_table5[256] = {
    [0x0c] = helper_pi2fw,
    [0x0d] = helper_pi2fd,
    [0x1c] = helper_pf2iw,
    [0x1d] = helper_pf2id,
    [0x8a] = helper_pfnacc,
    [0x8e] = helper_pfpnacc,
    [0x90] = helper_pfcmpge,
    [0x94] = helper_pfmin,
    [0x96] = helper_pfrcp,
    [0x97] = helper_pfrsqrt,
    [0x9a] = helper_pfsub,
    [0x9e] = helper_pfadd,
    [0xa0] = helper_pfcmpgt,
    [0xa4] = helper_pfmax,
    [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = helper_movq, /* pfrsqit1 */
    [0xaa] = helper_pfsubr,
    [0xae] = helper_pfacc,
    [0xb0] = helper_pfcmpeq,
    [0xb4] = helper_pfmul,
    [0xb6] = helper_movq, /* pfrcpit2 */
    [0xb7] = helper_pmulhrw_mmx,
    [0xbb] = helper_pswapd,
    [0xbf] = helper_pavgb_mmx /* pavgusb */
};
2633

    
2634
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2635
{
2636
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
2637
    int modrm, mod, rm, reg, reg_addr, offset_addr;
2638
    void *sse_op2;
2639

    
2640
    b &= 0xff;
2641
    if (s->prefix & PREFIX_DATA)
2642
        b1 = 1;
2643
    else if (s->prefix & PREFIX_REPZ)
2644
        b1 = 2;
2645
    else if (s->prefix & PREFIX_REPNZ)
2646
        b1 = 3;
2647
    else
2648
        b1 = 0;
2649
    sse_op2 = sse_op_table1[b][b1];
2650
    if (!sse_op2)
2651
        goto illegal_op;
2652
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
2653
        is_xmm = 1;
2654
    } else {
2655
        if (b1 == 0) {
2656
            /* MMX case */
2657
            is_xmm = 0;
2658
        } else {
2659
            is_xmm = 1;
2660
        }
2661
    }
2662
    /* simple MMX/SSE operation */
2663
    if (s->flags & HF_TS_MASK) {
2664
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2665
        return;
2666
    }
2667
    if (s->flags & HF_EM_MASK) {
2668
    illegal_op:
2669
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2670
        return;
2671
    }
2672
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2673
        goto illegal_op;
2674
    if (b == 0x0e) {
2675
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
2676
            goto illegal_op;
2677
        /* femms */
2678
        tcg_gen_helper_0_0(helper_emms);
2679
        return;
2680
    }
2681
    if (b == 0x77) {
2682
        /* emms */
2683
        tcg_gen_helper_0_0(helper_emms);
2684
        return;
2685
    }
2686
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2687
       the static cpu state) */
2688
    if (!is_xmm) {
2689
        tcg_gen_helper_0_0(helper_enter_mmx);
2690
    }
2691

    
2692
    modrm = ldub_code(s->pc++);
2693
    reg = ((modrm >> 3) & 7);
2694
    if (is_xmm)
2695
        reg |= rex_r;
2696
    mod = (modrm >> 6) & 3;
2697
    if (sse_op2 == SSE_SPECIAL) {
2698
        b |= (b1 << 8);
2699
        switch(b) {
2700
        case 0x0e7: /* movntq */
2701
            if (mod == 3)
2702
                goto illegal_op;
2703
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2704
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2705
            break;
2706
        case 0x1e7: /* movntdq */
2707
        case 0x02b: /* movntps */
2708
        case 0x12b: /* movntps */
2709
        case 0x3f0: /* lddqu */
2710
            if (mod == 3)
2711
                goto illegal_op;
2712
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2713
            gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2714
            break;
2715
        case 0x6e: /* movd mm, ea */
2716
#ifdef TARGET_X86_64
2717
            if (s->dflag == 2) {
2718
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2719
                tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
2720
            } else
2721
#endif
2722
            {
2723
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2724
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
2725
                                 offsetof(CPUX86State,fpregs[reg].mmx));
2726
                tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
2727
            }
2728
            break;
2729
        case 0x16e: /* movd xmm, ea */
2730
#ifdef TARGET_X86_64
2731
            if (s->dflag == 2) {
2732
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2733
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
2734
                                 offsetof(CPUX86State,xmm_regs[reg]));
2735
                tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
2736
            } else
2737
#endif
2738
            {
2739
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2740
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
2741
                                 offsetof(CPUX86State,xmm_regs[reg]));
2742
                tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
2743
                tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2);
2744
            }
2745
            break;
2746
        case 0x6f: /* movq mm, ea */
2747
            if (mod != 3) {
2748
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2749
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2750
            } else {
2751
                rm = (modrm & 7);
2752
                tcg_gen_ld_i64(cpu_tmp1, cpu_env,
2753
                               offsetof(CPUX86State,fpregs[rm].mmx));
2754
                tcg_gen_st_i64(cpu_tmp1, cpu_env,
2755
                               offsetof(CPUX86State,fpregs[reg].mmx));
2756
            }
2757
            break;
2758
        case 0x010: /* movups */
2759
        case 0x110: /* movupd */
2760
        case 0x028: /* movaps */
2761
        case 0x128: /* movapd */
2762
        case 0x16f: /* movdqa xmm, ea */
2763
        case 0x26f: /* movdqu xmm, ea */
2764
            if (mod != 3) {
2765
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2766
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2767
            } else {
2768
                rm = (modrm & 7) | REX_B(s);
2769
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2770
                            offsetof(CPUX86State,xmm_regs[rm]));
2771
            }
2772
            break;
2773
        case 0x210: /* movss xmm, ea */
2774
            if (mod != 3) {
2775
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2776
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
2777
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2778
                gen_op_movl_T0_0();
2779
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2780
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2781
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2782
            } else {
2783
                rm = (modrm & 7) | REX_B(s);
2784
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2785
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2786
            }
2787
            break;
2788
        case 0x310: /* movsd xmm, ea */
2789
            if (mod != 3) {
2790
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2791
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2792
                gen_op_movl_T0_0();
2793
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2794
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2795
            } else {
2796
                rm = (modrm & 7) | REX_B(s);
2797
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2798
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2799
            }
2800
            break;
2801
        case 0x012: /* movlps */
2802
        case 0x112: /* movlpd */
2803
            if (mod != 3) {
2804
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2805
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2806
            } else {
2807
                /* movhlps */
2808
                rm = (modrm & 7) | REX_B(s);
2809
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2810
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2811
            }
2812
            break;
2813
        case 0x212: /* movsldup */
2814
            if (mod != 3) {
2815
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2816
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2817
            } else {
2818
                rm = (modrm & 7) | REX_B(s);
2819
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2820
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2821
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2822
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
2823
            }
2824
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2825
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2826
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2827
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2828
            break;
2829
        case 0x312: /* movddup */
2830
            if (mod != 3) {
2831
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2832
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2833
            } else {
2834
                rm = (modrm & 7) | REX_B(s);
2835
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2836
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2837
            }
2838
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2839
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2840
            break;
2841
        case 0x016: /* movhps */
2842
        case 0x116: /* movhpd */
2843
            if (mod != 3) {
2844
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2845
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2846
            } else {
2847
                /* movlhps */
2848
                rm = (modrm & 7) | REX_B(s);
2849
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2850
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2851
            }
2852
            break;
2853
        case 0x216: /* movshdup */
2854
            if (mod != 3) {
2855
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2856
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2857
            } else {
2858
                rm = (modrm & 7) | REX_B(s);
2859
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2860
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
2861
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2862
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
2863
            }
2864
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2865
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2866
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2867
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2868
            break;
2869
        case 0x7e: /* movd ea, mm */
2870
#ifdef TARGET_X86_64
2871
            if (s->dflag == 2) {
2872
                tcg_gen_ld_i64(cpu_T[0], cpu_env, 
2873
                               offsetof(CPUX86State,fpregs[reg].mmx));
2874
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2875
            } else
2876
#endif
2877
            {
2878
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, 
2879
                                 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
2880
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2881
            }
2882
            break;
2883
        case 0x17e: /* movd ea, xmm */
2884
#ifdef TARGET_X86_64
2885
            if (s->dflag == 2) {
2886
                tcg_gen_ld_i64(cpu_T[0], cpu_env, 
2887
                               offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2888
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2889
            } else
2890
#endif
2891
            {
2892
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, 
2893
                                 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2894
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2895
            }
2896
            break;
2897
        case 0x27e: /* movq xmm, ea */
2898
            if (mod != 3) {
2899
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2900
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2901
            } else {
2902
                rm = (modrm & 7) | REX_B(s);
2903
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2904
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2905
            }
2906
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2907
            break;
2908
        case 0x7f: /* movq ea, mm */
2909
            if (mod != 3) {
2910
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2911
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2912
            } else {
2913
                rm = (modrm & 7);
2914
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2915
                            offsetof(CPUX86State,fpregs[reg].mmx));
2916
            }
2917
            break;
2918
        case 0x011: /* movups */
2919
        case 0x111: /* movupd */
2920
        case 0x029: /* movaps */
2921
        case 0x129: /* movapd */
2922
        case 0x17f: /* movdqa ea, xmm */
2923
        case 0x27f: /* movdqu ea, xmm */
2924
            if (mod != 3) {
2925
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2926
                gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2927
            } else {
2928
                rm = (modrm & 7) | REX_B(s);
2929
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
2930
                            offsetof(CPUX86State,xmm_regs[reg]));
2931
            }
2932
            break;
2933
        case 0x211: /* movss ea, xmm */
2934
            if (mod != 3) {
2935
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2936
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2937
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
2938
            } else {
2939
                rm = (modrm & 7) | REX_B(s);
2940
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
2941
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2942
            }
2943
            break;
2944
        case 0x311: /* movsd ea, xmm */
2945
            if (mod != 3) {
2946
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2947
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2948
            } else {
2949
                rm = (modrm & 7) | REX_B(s);
2950
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2951
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2952
            }
2953
            break;
2954
        case 0x013: /* movlps */
2955
        case 0x113: /* movlpd */
2956
            if (mod != 3) {
2957
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2958
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2959
            } else {
2960
                goto illegal_op;
2961
            }
2962
            break;
2963
        case 0x017: /* movhps */
2964
        case 0x117: /* movhpd */
2965
            if (mod != 3) {
2966
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2967
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2968
            } else {
2969
                goto illegal_op;
2970
            }
2971
            break;
2972
        case 0x71: /* shift mm, im */
2973
        case 0x72:
2974
        case 0x73:
2975
        case 0x171: /* shift xmm, im */
2976
        case 0x172:
2977
        case 0x173:
2978
            val = ldub_code(s->pc++);
2979
            if (is_xmm) {
2980
                gen_op_movl_T0_im(val);
2981
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2982
                gen_op_movl_T0_0();
2983
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
2984
                op1_offset = offsetof(CPUX86State,xmm_t0);
2985
            } else {
2986
                gen_op_movl_T0_im(val);
2987
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
2988
                gen_op_movl_T0_0();
2989
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
2990
                op1_offset = offsetof(CPUX86State,mmx_t0);
2991
            }
2992
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
2993
            if (!sse_op2)
2994
                goto illegal_op;
2995
            if (is_xmm) {
2996
                rm = (modrm & 7) | REX_B(s);
2997
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2998
            } else {
2999
                rm = (modrm & 7);
3000
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3001
            }
3002
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3003
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3004
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3005
            break;
3006
        case 0x050: /* movmskps */
3007
            rm = (modrm & 7) | REX_B(s);
3008
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
3009
                             offsetof(CPUX86State,xmm_regs[rm]));
3010
            tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2, cpu_ptr0);
3011
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
3012
            gen_op_mov_reg_T0(OT_LONG, reg);
3013
            break;
3014
        case 0x150: /* movmskpd */
3015
            rm = (modrm & 7) | REX_B(s);
3016
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
3017
                             offsetof(CPUX86State,xmm_regs[rm]));
3018
            tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2, cpu_ptr0);
3019
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
3020
            gen_op_mov_reg_T0(OT_LONG, reg);
3021
            break;
3022
        case 0x02a: /* cvtpi2ps */
3023
        case 0x12a: /* cvtpi2pd */
3024
            tcg_gen_helper_0_0(helper_enter_mmx);
3025
            if (mod != 3) {
3026
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3027
                op2_offset = offsetof(CPUX86State,mmx_t0);
3028
                gen_ldq_env_A0(s->mem_index, op2_offset);
3029
            } else {
3030
                rm = (modrm & 7);
3031
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3032
            }
3033
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3034
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3035
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3036
            switch(b >> 8) {
3037
            case 0x0:
3038
                tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3039
                break;
3040
            default:
3041
            case 0x1:
3042
                tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3043
                break;
3044
            }
3045
            break;
3046
        case 0x22a: /* cvtsi2ss */
3047
        case 0x32a: /* cvtsi2sd */
3048
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3049
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3050
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3051
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3052
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3053
            tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
3054
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2);
3055
            break;
3056
        case 0x02c: /* cvttps2pi */
3057
        case 0x12c: /* cvttpd2pi */
3058
        case 0x02d: /* cvtps2pi */
3059
        case 0x12d: /* cvtpd2pi */
3060
            tcg_gen_helper_0_0(helper_enter_mmx);
3061
            if (mod != 3) {
3062
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3063
                op2_offset = offsetof(CPUX86State,xmm_t0);
3064
                gen_ldo_env_A0(s->mem_index, op2_offset);
3065
            } else {
3066
                rm = (modrm & 7) | REX_B(s);
3067
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3068
            }
3069
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3070
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3071
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3072
            switch(b) {
3073
            case 0x02c:
3074
                tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3075
                break;
3076
            case 0x12c:
3077
                tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3078
                break;
3079
            case 0x02d:
3080
                tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3081
                break;
3082
            case 0x12d:
3083
                tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3084
                break;
3085
            }
3086
            break;
3087
        case 0x22c: /* cvttss2si */
3088
        case 0x32c: /* cvttsd2si */
3089
        case 0x22d: /* cvtss2si */
3090
        case 0x32d: /* cvtsd2si */
3091
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3092
            if (mod != 3) {
3093
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3094
                if ((b >> 8) & 1) {
3095
                    gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3096
                } else {
3097
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3098
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3099
                }
3100
                op2_offset = offsetof(CPUX86State,xmm_t0);
3101
            } else {
3102
                rm = (modrm & 7) | REX_B(s);
3103
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3104
            }
3105
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3106
                                    (b & 1) * 4];
3107
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3108
            if (ot == OT_LONG) {
3109
                tcg_gen_helper_1_1(sse_op2, cpu_tmp2, cpu_ptr0);
3110
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
3111
            } else {
3112
                tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
3113
            }
3114
            gen_op_mov_reg_T0(ot, reg);
3115
            break;
3116
        case 0xc4: /* pinsrw */
3117
        case 0x1c4:
3118
            s->rip_offset = 1;
3119
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3120
            val = ldub_code(s->pc++);
3121
            if (b1) {
3122
                val &= 7;
3123
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
3124
                                offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3125
            } else {
3126
                val &= 3;
3127
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
3128
                                offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3129
            }
3130
            break;
3131
        case 0xc5: /* pextrw */
3132
        case 0x1c5:
3133
            if (mod != 3)
3134
                goto illegal_op;
3135
            val = ldub_code(s->pc++);
3136
            if (b1) {
3137
                val &= 7;
3138
                rm = (modrm & 7) | REX_B(s);
3139
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3140
                                 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3141
            } else {
3142
                val &= 3;
3143
                rm = (modrm & 7);
3144
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3145
                                offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3146
            }
3147
            reg = ((modrm >> 3) & 7) | rex_r;
3148
            gen_op_mov_reg_T0(OT_LONG, reg);
3149
            break;
3150
        case 0x1d6: /* movq ea, xmm */
3151
            if (mod != 3) {
3152
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3153
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3154
            } else {
3155
                rm = (modrm & 7) | REX_B(s);
3156
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3157
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3158
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3159
            }
3160
            break;
3161
        case 0x2d6: /* movq2dq */
3162
            tcg_gen_helper_0_0(helper_enter_mmx);
3163
            rm = (modrm & 7);
3164
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3165
                        offsetof(CPUX86State,fpregs[rm].mmx));
3166
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3167
            break;
3168
        case 0x3d6: /* movdq2q */
3169
            tcg_gen_helper_0_0(helper_enter_mmx);
3170
            rm = (modrm & 7) | REX_B(s);
3171
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3172
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3173
            break;
3174
        case 0xd7: /* pmovmskb */
3175
        case 0x1d7:
3176
            if (mod != 3)
3177
                goto illegal_op;
3178
            if (b1) {
3179
                rm = (modrm & 7) | REX_B(s);
3180
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
3181
                tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2, cpu_ptr0);
3182
            } else {
3183
                rm = (modrm & 7);
3184
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
3185
                tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2, cpu_ptr0);
3186
            }
3187
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
3188
            reg = ((modrm >> 3) & 7) | rex_r;
3189
            gen_op_mov_reg_T0(OT_LONG, reg);
3190
            break;
3191
        default:
3192
            goto illegal_op;
3193
        }
3194
    } else {
3195
        /* generic MMX or SSE operation */
3196
        switch(b) {
3197
        case 0x70: /* pshufx insn */
3198
        case 0xc6: /* pshufx insn */
3199
        case 0xc2: /* compare insns */
3200
            s->rip_offset = 1;
3201
            break;
3202
        default:
3203
            break;
3204
        }
3205
        if (is_xmm) {
3206
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3207
            if (mod != 3) {
3208
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3209
                op2_offset = offsetof(CPUX86State,xmm_t0);
3210
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3211
                                b == 0xc2)) {
3212
                    /* specific case for SSE single instructions */
3213
                    if (b1 == 2) {
3214
                        /* 32 bit access */
3215
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3216
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3217
                    } else {
3218
                        /* 64 bit access */
3219
                        gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3220
                    }
3221
                } else {
3222
                    gen_ldo_env_A0(s->mem_index, op2_offset);
3223
                }
3224
            } else {
3225
                rm = (modrm & 7) | REX_B(s);
3226
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3227
            }
3228
        } else {
3229
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3230
            if (mod != 3) {
3231
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3232
                op2_offset = offsetof(CPUX86State,mmx_t0);
3233
                gen_ldq_env_A0(s->mem_index, op2_offset);
3234
            } else {
3235
                rm = (modrm & 7);
3236
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3237
            }
3238
        }
3239
        switch(b) {
3240
        case 0x0f: /* 3DNow! data insns */
3241
            if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3242
                goto illegal_op;
3243
            val = ldub_code(s->pc++);
3244
            sse_op2 = sse_op_table5[val];
3245
            if (!sse_op2)
3246
                goto illegal_op;
3247
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3248
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3249
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3250
            break;
3251
        case 0x70: /* pshufx insn */
3252
        case 0xc6: /* pshufx insn */
3253
            val = ldub_code(s->pc++);
3254
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3255
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3256
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3257
            break;
3258
        case 0xc2:
3259
            /* compare insns */
3260
            val = ldub_code(s->pc++);
3261
            if (val >= 8)
3262
                goto illegal_op;
3263
            sse_op2 = sse_op_table4[val][b1];
3264
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3265
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3266
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3267
            break;
3268
        case 0xf7:
3269
            /* maskmov : we must prepare A0 */
3270
            if (mod != 3)
3271
                goto illegal_op;
3272
#ifdef TARGET_X86_64
3273
            if (s->aflag == 2) {
3274
                gen_op_movq_A0_reg(R_EDI);
3275
            } else
3276
#endif
3277
            {
3278
                gen_op_movl_A0_reg(R_EDI);
3279
                if (s->aflag == 0)
3280
                    gen_op_andl_A0_ffff();
3281
            }
3282
            gen_add_A0_ds_seg(s);
3283

    
3284
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3285
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3286
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
3287
            break;
3288
        default:
3289
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3290
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3291
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3292
            break;
3293
        }
3294
        if (b == 0x2e || b == 0x2f) {
3295
            /* just to keep the EFLAGS optimization correct */
3296
            gen_op_com_dummy();
3297
            s->cc_op = CC_OP_EFLAGS;
3298
        }
3299
    }
3300
}
3301

    
3302
/* convert one instruction. s->is_jmp is set if the translation must
3303
   be stopped. Return the next pc value */
3304
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3305
{
3306
    int b, prefixes, aflag, dflag;
3307
    int shift, ot;
3308
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3309
    target_ulong next_eip, tval;
3310
    int rex_w, rex_r;
3311

    
3312
    s->pc = pc_start;
3313
    prefixes = 0;
3314
    aflag = s->code32;
3315
    dflag = s->code32;
3316
    s->override = -1;
3317
    rex_w = -1;
3318
    rex_r = 0;
3319
#ifdef TARGET_X86_64
3320
    s->rex_x = 0;
3321
    s->rex_b = 0;
3322
    x86_64_hregs = 0;
3323
#endif
3324
    s->rip_offset = 0; /* for relative ip address */
3325
 next_byte:
3326
    b = ldub_code(s->pc);
3327
    s->pc++;
3328
    /* check prefixes */
3329
#ifdef TARGET_X86_64
3330
    if (CODE64(s)) {
3331
        switch (b) {
3332
        case 0xf3:
3333
            prefixes |= PREFIX_REPZ;
3334
            goto next_byte;
3335
        case 0xf2:
3336
            prefixes |= PREFIX_REPNZ;
3337
            goto next_byte;
3338
        case 0xf0:
3339
            prefixes |= PREFIX_LOCK;
3340
            goto next_byte;
3341
        case 0x2e:
3342
            s->override = R_CS;
3343
            goto next_byte;
3344
        case 0x36:
3345
            s->override = R_SS;
3346
            goto next_byte;
3347
        case 0x3e:
3348
            s->override = R_DS;
3349
            goto next_byte;
3350
        case 0x26:
3351
            s->override = R_ES;
3352
            goto next_byte;
3353
        case 0x64:
3354
            s->override = R_FS;
3355
            goto next_byte;
3356
        case 0x65:
3357
            s->override = R_GS;
3358
            goto next_byte;
3359
        case 0x66:
3360
            prefixes |= PREFIX_DATA;
3361
            goto next_byte;
3362
        case 0x67:
3363
            prefixes |= PREFIX_ADR;
3364
            goto next_byte;
3365
        case 0x40 ... 0x4f:
3366
            /* REX prefix */
3367
            rex_w = (b >> 3) & 1;
3368
            rex_r = (b & 0x4) << 1;
3369
            s->rex_x = (b & 0x2) << 2;
3370
            REX_B(s) = (b & 0x1) << 3;
3371
            x86_64_hregs = 1; /* select uniform byte register addressing */
3372
            goto next_byte;
3373
        }
3374
        if (rex_w == 1) {
3375
            /* 0x66 is ignored if rex.w is set */
3376
            dflag = 2;
3377
        } else {
3378
            if (prefixes & PREFIX_DATA)
3379
                dflag ^= 1;
3380
        }
3381
        if (!(prefixes & PREFIX_ADR))
3382
            aflag = 2;
3383
    } else
3384
#endif
3385
    {
3386
        switch (b) {
3387
        case 0xf3:
3388
            prefixes |= PREFIX_REPZ;
3389
            goto next_byte;
3390
        case 0xf2:
3391
            prefixes |= PREFIX_REPNZ;
3392
            goto next_byte;
3393
        case 0xf0:
3394
            prefixes |= PREFIX_LOCK;
3395
            goto next_byte;
3396
        case 0x2e:
3397
            s->override = R_CS;
3398
            goto next_byte;
3399
        case 0x36:
3400
            s->override = R_SS;
3401
            goto next_byte;
3402
        case 0x3e:
3403
            s->override = R_DS;
3404
            goto next_byte;
3405
        case 0x26:
3406
            s->override = R_ES;
3407
            goto next_byte;
3408
        case 0x64:
3409
            s->override = R_FS;
3410
            goto next_byte;
3411
        case 0x65:
3412
            s->override = R_GS;
3413
            goto next_byte;
3414
        case 0x66:
3415
            prefixes |= PREFIX_DATA;
3416
            goto next_byte;
3417
        case 0x67:
3418
            prefixes |= PREFIX_ADR;
3419
            goto next_byte;
3420
        }
3421
        if (prefixes & PREFIX_DATA)
3422
            dflag ^= 1;
3423
        if (prefixes & PREFIX_ADR)
3424
            aflag ^= 1;
3425
    }
3426

    
3427
    s->prefix = prefixes;
3428
    s->aflag = aflag;
3429
    s->dflag = dflag;
3430

    
3431
    /* lock generation */
3432
    if (prefixes & PREFIX_LOCK)
3433
        tcg_gen_helper_0_0(helper_lock);
3434

    
3435
    /* now check op code */
3436
 reswitch:
3437
    switch(b) {
3438
    case 0x0f:
3439
        /**************************/
3440
        /* extended op code */
3441
        b = ldub_code(s->pc++) | 0x100;
3442
        goto reswitch;
3443

    
3444
        /**************************/
3445
        /* arith & logic */
3446
    case 0x00 ... 0x05:
3447
    case 0x08 ... 0x0d:
3448
    case 0x10 ... 0x15:
3449
    case 0x18 ... 0x1d:
3450
    case 0x20 ... 0x25:
3451
    case 0x28 ... 0x2d:
3452
    case 0x30 ... 0x35:
3453
    case 0x38 ... 0x3d:
3454
        {
3455
            int op, f, val;
3456
            op = (b >> 3) & 7;
3457
            f = (b >> 1) & 3;
3458

    
3459
            if ((b & 1) == 0)
3460
                ot = OT_BYTE;
3461
            else
3462
                ot = dflag + OT_WORD;
3463

    
3464
            switch(f) {
3465
            case 0: /* OP Ev, Gv */
3466
                modrm = ldub_code(s->pc++);
3467
                reg = ((modrm >> 3) & 7) | rex_r;
3468
                mod = (modrm >> 6) & 3;
3469
                rm = (modrm & 7) | REX_B(s);
3470
                if (mod != 3) {
3471
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3472
                    opreg = OR_TMP0;
3473
                } else if (op == OP_XORL && rm == reg) {
3474
                xor_zero:
3475
                    /* xor reg, reg optimisation */
3476
                    gen_op_movl_T0_0();
3477
                    s->cc_op = CC_OP_LOGICB + ot;
3478
                    gen_op_mov_reg_T0(ot, reg);
3479
                    gen_op_update1_cc();
3480
                    break;
3481
                } else {
3482
                    opreg = rm;
3483
                }
3484
                gen_op_mov_TN_reg(ot, 1, reg);
3485
                gen_op(s, op, ot, opreg);
3486
                break;
3487
            case 1: /* OP Gv, Ev */
3488
                modrm = ldub_code(s->pc++);
3489
                mod = (modrm >> 6) & 3;
3490
                reg = ((modrm >> 3) & 7) | rex_r;
3491
                rm = (modrm & 7) | REX_B(s);
3492
                if (mod != 3) {
3493
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3494
                    gen_op_ld_T1_A0(ot + s->mem_index);
3495
                } else if (op == OP_XORL && rm == reg) {
3496
                    goto xor_zero;
3497
                } else {
3498
                    gen_op_mov_TN_reg(ot, 1, rm);
3499
                }
3500
                gen_op(s, op, ot, reg);
3501
                break;
3502
            case 2: /* OP A, Iv */
3503
                val = insn_get(s, ot);
3504
                gen_op_movl_T1_im(val);
3505
                gen_op(s, op, ot, OR_EAX);
3506
                break;
3507
            }
3508
        }
3509
        break;
3510

    
3511
    case 0x80: /* GRP1 */
3512
    case 0x81:
3513
    case 0x82:
3514
    case 0x83:
3515
        {
3516
            int val;
3517

    
3518
            if ((b & 1) == 0)
3519
                ot = OT_BYTE;
3520
            else
3521
                ot = dflag + OT_WORD;
3522

    
3523
            modrm = ldub_code(s->pc++);
3524
            mod = (modrm >> 6) & 3;
3525
            rm = (modrm & 7) | REX_B(s);
3526
            op = (modrm >> 3) & 7;
3527

    
3528
            if (mod != 3) {
3529
                if (b == 0x83)
3530
                    s->rip_offset = 1;
3531
                else
3532
                    s->rip_offset = insn_const_size(ot);
3533
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3534
                opreg = OR_TMP0;
3535
            } else {
3536
                opreg = rm;
3537
            }
3538

    
3539
            switch(b) {
3540
            default:
3541
            case 0x80:
3542
            case 0x81:
3543
            case 0x82:
3544
                val = insn_get(s, ot);
3545
                break;
3546
            case 0x83:
3547
                val = (int8_t)insn_get(s, OT_BYTE);
3548
                break;
3549
            }
3550
            gen_op_movl_T1_im(val);
3551
            gen_op(s, op, ot, opreg);
3552
        }
3553
        break;
3554

    
3555
        /**************************/
3556
        /* inc, dec, and other misc arith */
3557
    case 0x40 ... 0x47: /* inc Gv */
3558
        ot = dflag ? OT_LONG : OT_WORD;
3559
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
3560
        break;
3561
    case 0x48 ... 0x4f: /* dec Gv */
3562
        ot = dflag ? OT_LONG : OT_WORD;
3563
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
3564
        break;
3565
    case 0xf6: /* GRP3 */
3566
    case 0xf7:
3567
        if ((b & 1) == 0)
3568
            ot = OT_BYTE;
3569
        else
3570
            ot = dflag + OT_WORD;
3571

    
3572
        modrm = ldub_code(s->pc++);
3573
        mod = (modrm >> 6) & 3;
3574
        rm = (modrm & 7) | REX_B(s);
3575
        op = (modrm >> 3) & 7;
3576
        if (mod != 3) {
3577
            if (op == 0)
3578
                s->rip_offset = insn_const_size(ot);
3579
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3580
            gen_op_ld_T0_A0(ot + s->mem_index);
3581
        } else {
3582
            gen_op_mov_TN_reg(ot, 0, rm);
3583
        }
3584

    
3585
        switch(op) {
3586
        case 0: /* test */
3587
            val = insn_get(s, ot);
3588
            gen_op_movl_T1_im(val);
3589
            gen_op_testl_T0_T1_cc();
3590
            s->cc_op = CC_OP_LOGICB + ot;
3591
            break;
3592
        case 2: /* not */
3593
            gen_op_notl_T0();
3594
            if (mod != 3) {
3595
                gen_op_st_T0_A0(ot + s->mem_index);
3596
            } else {
3597
                gen_op_mov_reg_T0(ot, rm);
3598
            }
3599
            break;
3600
        case 3: /* neg */
3601
            gen_op_negl_T0();
3602
            if (mod != 3) {
3603
                gen_op_st_T0_A0(ot + s->mem_index);
3604
            } else {
3605
                gen_op_mov_reg_T0(ot, rm);
3606
            }
3607
            gen_op_update_neg_cc();
3608
            s->cc_op = CC_OP_SUBB + ot;
3609
            break;
3610
        case 4: /* mul */
3611
            switch(ot) {
3612
            case OT_BYTE:
3613
                gen_op_mulb_AL_T0();
3614
                s->cc_op = CC_OP_MULB;
3615
                break;
3616
            case OT_WORD:
3617
                gen_op_mulw_AX_T0();
3618
                s->cc_op = CC_OP_MULW;
3619
                break;
3620
            default:
3621
            case OT_LONG:
3622
                gen_op_mull_EAX_T0();
3623
                s->cc_op = CC_OP_MULL;
3624
                break;
3625
#ifdef TARGET_X86_64
3626
            case OT_QUAD:
3627
                gen_op_mulq_EAX_T0();
3628
                s->cc_op = CC_OP_MULQ;
3629
                break;
3630
#endif
3631
            }
3632
            break;
3633
        case 5: /* imul */
3634
            switch(ot) {
3635
            case OT_BYTE:
3636
                gen_op_imulb_AL_T0();
3637
                s->cc_op = CC_OP_MULB;
3638
                break;
3639
            case OT_WORD:
3640
                gen_op_imulw_AX_T0();
3641
                s->cc_op = CC_OP_MULW;
3642
                break;
3643
            default:
3644
            case OT_LONG:
3645
                gen_op_imull_EAX_T0();
3646
                s->cc_op = CC_OP_MULL;
3647
                break;
3648
#ifdef TARGET_X86_64
3649
            case OT_QUAD:
3650
                gen_op_imulq_EAX_T0();
3651
                s->cc_op = CC_OP_MULQ;
3652
                break;
3653
#endif
3654
            }
3655
            break;
3656
        case 6: /* div */
3657
            switch(ot) {
3658
            case OT_BYTE:
3659
                gen_jmp_im(pc_start - s->cs_base);
3660
                tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
3661
                break;
3662
            case OT_WORD:
3663
                gen_jmp_im(pc_start - s->cs_base);
3664
                tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
3665
                break;
3666
            default:
3667
            case OT_LONG:
3668
                gen_jmp_im(pc_start - s->cs_base);
3669
                tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
3670
                break;
3671
#ifdef TARGET_X86_64
3672
            case OT_QUAD:
3673
                gen_jmp_im(pc_start - s->cs_base);
3674
                tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
3675
                break;
3676
#endif
3677
            }
3678
            break;
3679
        case 7: /* idiv */
3680
            switch(ot) {
3681
            case OT_BYTE:
3682
                gen_jmp_im(pc_start - s->cs_base);
3683
                tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
3684
                break;
3685
            case OT_WORD:
3686
                gen_jmp_im(pc_start - s->cs_base);
3687
                tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
3688
                break;
3689
            default:
3690
            case OT_LONG:
3691
                gen_jmp_im(pc_start - s->cs_base);
3692
                tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
3693
                break;
3694
#ifdef TARGET_X86_64
3695
            case OT_QUAD:
3696
                gen_jmp_im(pc_start - s->cs_base);
3697
                tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
3698
                break;
3699
#endif
3700
            }
3701
            break;
3702
        default:
3703
            goto illegal_op;
3704
        }
3705
        break;
3706

    
3707
    case 0xfe: /* GRP4 */
3708
    case 0xff: /* GRP5 */
3709
        if ((b & 1) == 0)
3710
            ot = OT_BYTE;
3711
        else
3712
            ot = dflag + OT_WORD;
3713

    
3714
        modrm = ldub_code(s->pc++);
3715
        mod = (modrm >> 6) & 3;
3716
        rm = (modrm & 7) | REX_B(s);
3717
        op = (modrm >> 3) & 7;
3718
        if (op >= 2 && b == 0xfe) {
3719
            goto illegal_op;
3720
        }
3721
        if (CODE64(s)) {
3722
            if (op == 2 || op == 4) {
3723
                /* operand size for jumps is 64 bit */
3724
                ot = OT_QUAD;
3725
            } else if (op == 3 || op == 5) {
3726
                /* for call calls, the operand is 16 or 32 bit, even
3727
                   in long mode */
3728
                ot = dflag ? OT_LONG : OT_WORD;
3729
            } else if (op == 6) {
3730
                /* default push size is 64 bit */
3731
                ot = dflag ? OT_QUAD : OT_WORD;
3732
            }
3733
        }
3734
        if (mod != 3) {
3735
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3736
            if (op >= 2 && op != 3 && op != 5)
3737
                gen_op_ld_T0_A0(ot + s->mem_index);
3738
        } else {
3739
            gen_op_mov_TN_reg(ot, 0, rm);
3740
        }
3741

    
3742
        switch(op) {
3743
        case 0: /* inc Ev */
3744
            if (mod != 3)
3745
                opreg = OR_TMP0;
3746
            else
3747
                opreg = rm;
3748
            gen_inc(s, ot, opreg, 1);
3749
            break;
3750
        case 1: /* dec Ev */
3751
            if (mod != 3)
3752
                opreg = OR_TMP0;
3753
            else
3754
                opreg = rm;
3755
            gen_inc(s, ot, opreg, -1);
3756
            break;
3757
        case 2: /* call Ev */
3758
            /* XXX: optimize if memory (no 'and' is necessary) */
3759
            if (s->dflag == 0)
3760
                gen_op_andl_T0_ffff();
3761
            next_eip = s->pc - s->cs_base;
3762
            gen_movtl_T1_im(next_eip);
3763
            gen_push_T1(s);
3764
            gen_op_jmp_T0();
3765
            gen_eob(s);
3766
            break;
3767
        case 3: /* lcall Ev */
3768
            gen_op_ld_T1_A0(ot + s->mem_index);
3769
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3770
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3771
        do_lcall:
3772
            if (s->pe && !s->vm86) {
3773
                if (s->cc_op != CC_OP_DYNAMIC)
3774
                    gen_op_set_cc_op(s->cc_op);
3775
                gen_jmp_im(pc_start - s->cs_base);
3776
                tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
3777
                tcg_gen_helper_0_4(helper_lcall_protected,
3778
                                   cpu_tmp2, cpu_T[1],
3779
                                   tcg_const_i32(dflag), 
3780
                                   tcg_const_i32(s->pc - pc_start));
3781
            } else {
3782
                tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
3783
                tcg_gen_helper_0_4(helper_lcall_real,
3784
                                   cpu_tmp2, cpu_T[1],
3785
                                   tcg_const_i32(dflag), 
3786
                                   tcg_const_i32(s->pc - s->cs_base));
3787
            }
3788
            gen_eob(s);
3789
            break;
3790
        case 4: /* jmp Ev */
3791
            if (s->dflag == 0)
3792
                gen_op_andl_T0_ffff();
3793
            gen_op_jmp_T0();
3794
            gen_eob(s);
3795
            break;
3796
        case 5: /* ljmp Ev */
3797
            gen_op_ld_T1_A0(ot + s->mem_index);
3798
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3799
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3800
        do_ljmp:
3801
            if (s->pe && !s->vm86) {
3802
                if (s->cc_op != CC_OP_DYNAMIC)
3803
                    gen_op_set_cc_op(s->cc_op);
3804
                gen_jmp_im(pc_start - s->cs_base);
3805
                tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
3806
                tcg_gen_helper_0_3(helper_ljmp_protected,
3807
                                   cpu_tmp2,
3808
                                   cpu_T[1],
3809
                                   tcg_const_i32(s->pc - pc_start));
3810
            } else {
3811
                gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3812
                gen_op_movl_T0_T1();
3813
                gen_op_jmp_T0();
3814
            }
3815
            gen_eob(s);
3816
            break;
3817
        case 6: /* push Ev */
3818
            gen_push_T0(s);
3819
            break;
3820
        default:
3821
            goto illegal_op;
3822
        }
3823
        break;
3824

    
3825
    case 0x84: /* test Ev, Gv */
3826
    case 0x85:
3827
        if ((b & 1) == 0)
3828
            ot = OT_BYTE;
3829
        else
3830
            ot = dflag + OT_WORD;
3831

    
3832
        modrm = ldub_code(s->pc++);
3833
        mod = (modrm >> 6) & 3;
3834
        rm = (modrm & 7) | REX_B(s);
3835
        reg = ((modrm >> 3) & 7) | rex_r;
3836

    
3837
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3838
        gen_op_mov_TN_reg(ot, 1, reg);
3839
        gen_op_testl_T0_T1_cc();
3840
        s->cc_op = CC_OP_LOGICB + ot;
3841
        break;
3842

    
3843
    case 0xa8: /* test eAX, Iv */
3844
    case 0xa9:
3845
        if ((b & 1) == 0)
3846
            ot = OT_BYTE;
3847
        else
3848
            ot = dflag + OT_WORD;
3849
        val = insn_get(s, ot);
3850

    
3851
        gen_op_mov_TN_reg(ot, 0, OR_EAX);
3852
        gen_op_movl_T1_im(val);
3853
        gen_op_testl_T0_T1_cc();
3854
        s->cc_op = CC_OP_LOGICB + ot;
3855
        break;
3856

    
3857
    case 0x98: /* CWDE/CBW */
3858
#ifdef TARGET_X86_64
3859
        if (dflag == 2) {
3860
            gen_op_movslq_RAX_EAX();
3861
        } else
3862
#endif
3863
        if (dflag == 1)
3864
            gen_op_movswl_EAX_AX();
3865
        else
3866
            gen_op_movsbw_AX_AL();
3867
        break;
3868
    case 0x99: /* CDQ/CWD */
3869
#ifdef TARGET_X86_64
3870
        if (dflag == 2) {
3871
            gen_op_movsqo_RDX_RAX();
3872
        } else
3873
#endif
3874
        if (dflag == 1)
3875
            gen_op_movslq_EDX_EAX();
3876
        else
3877
            gen_op_movswl_DX_AX();
3878
        break;
3879
    case 0x1af: /* imul Gv, Ev */
3880
    case 0x69: /* imul Gv, Ev, I */
3881
    case 0x6b:
3882
        ot = dflag + OT_WORD;
3883
        modrm = ldub_code(s->pc++);
3884
        reg = ((modrm >> 3) & 7) | rex_r;
3885
        if (b == 0x69)
3886
            s->rip_offset = insn_const_size(ot);
3887
        else if (b == 0x6b)
3888
            s->rip_offset = 1;
3889
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3890
        if (b == 0x69) {
3891
            val = insn_get(s, ot);
3892
            gen_op_movl_T1_im(val);
3893
        } else if (b == 0x6b) {
3894
            val = (int8_t)insn_get(s, OT_BYTE);
3895
            gen_op_movl_T1_im(val);
3896
        } else {
3897
            gen_op_mov_TN_reg(ot, 1, reg);
3898
        }
3899

    
3900
#ifdef TARGET_X86_64
3901
        if (ot == OT_QUAD) {
3902
            gen_op_imulq_T0_T1();
3903
        } else
3904
#endif
3905
        if (ot == OT_LONG) {
3906
            gen_op_imull_T0_T1();
3907
        } else {
3908
            gen_op_imulw_T0_T1();
3909
        }
3910
        gen_op_mov_reg_T0(ot, reg);
3911
        s->cc_op = CC_OP_MULB + ot;
3912
        break;
3913
    case 0x1c0:
3914
    case 0x1c1: /* xadd Ev, Gv */
3915
        if ((b & 1) == 0)
3916
            ot = OT_BYTE;
3917
        else
3918
            ot = dflag + OT_WORD;
3919
        modrm = ldub_code(s->pc++);
3920
        reg = ((modrm >> 3) & 7) | rex_r;
3921
        mod = (modrm >> 6) & 3;
3922
        if (mod == 3) {
3923
            rm = (modrm & 7) | REX_B(s);
3924
            gen_op_mov_TN_reg(ot, 0, reg);
3925
            gen_op_mov_TN_reg(ot, 1, rm);
3926
            gen_op_addl_T0_T1();
3927
            gen_op_mov_reg_T1(ot, reg);
3928
            gen_op_mov_reg_T0(ot, rm);
3929
        } else {
3930
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3931
            gen_op_mov_TN_reg(ot, 0, reg);
3932
            gen_op_ld_T1_A0(ot + s->mem_index);
3933
            gen_op_addl_T0_T1();
3934
            gen_op_st_T0_A0(ot + s->mem_index);
3935
            gen_op_mov_reg_T1(ot, reg);
3936
        }
3937
        gen_op_update2_cc();
3938
        s->cc_op = CC_OP_ADDB + ot;
3939
        break;
3940
    case 0x1b0:
3941
    case 0x1b1: /* cmpxchg Ev, Gv */
3942
        if ((b & 1) == 0)
3943
            ot = OT_BYTE;
3944
        else
3945
            ot = dflag + OT_WORD;
3946
        modrm = ldub_code(s->pc++);
3947
        reg = ((modrm >> 3) & 7) | rex_r;
3948
        mod = (modrm >> 6) & 3;
3949
        gen_op_mov_TN_reg(ot, 1, reg);
3950
        if (mod == 3) {
3951
            rm = (modrm & 7) | REX_B(s);
3952
            gen_op_mov_TN_reg(ot, 0, rm);
3953
            gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3954
            gen_op_mov_reg_T0(ot, rm);
3955
        } else {
3956
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3957
            gen_op_ld_T0_A0(ot + s->mem_index);
3958
            gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3959
        }
3960
        s->cc_op = CC_OP_SUBB + ot;
3961
        break;
3962
    case 0x1c7: /* cmpxchg8b */
3963
        modrm = ldub_code(s->pc++);
3964
        mod = (modrm >> 6) & 3;
3965
        if ((mod == 3) || ((modrm & 0x38) != 0x8))
3966
            goto illegal_op;
3967
        gen_jmp_im(pc_start - s->cs_base);
3968
        if (s->cc_op != CC_OP_DYNAMIC)
3969
            gen_op_set_cc_op(s->cc_op);
3970
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3971
        gen_op_cmpxchg8b();
3972
        s->cc_op = CC_OP_EFLAGS;
3973
        break;
3974

    
3975
        /**************************/
3976
        /* push/pop */
3977
    case 0x50 ... 0x57: /* push */
3978
        gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
3979
        gen_push_T0(s);
3980
        break;
3981
    case 0x58 ... 0x5f: /* pop */
3982
        if (CODE64(s)) {
3983
            ot = dflag ? OT_QUAD : OT_WORD;
3984
        } else {
3985
            ot = dflag + OT_WORD;
3986
        }
3987
        gen_pop_T0(s);
3988
        /* NOTE: order is important for pop %sp */
3989
        gen_pop_update(s);
3990
        gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
3991
        break;
3992
    case 0x60: /* pusha */
3993
        if (CODE64(s))
3994
            goto illegal_op;
3995
        gen_pusha(s);
3996
        break;
3997
    case 0x61: /* popa */
3998
        if (CODE64(s))
3999
            goto illegal_op;
4000
        gen_popa(s);
4001
        break;
4002
    case 0x68: /* push Iv */
4003
    case 0x6a:
4004
        if (CODE64(s)) {
4005
            ot = dflag ? OT_QUAD : OT_WORD;
4006
        } else {
4007
            ot = dflag + OT_WORD;
4008
        }
4009
        if (b == 0x68)
4010
            val = insn_get(s, ot);
4011
        else
4012
            val = (int8_t)insn_get(s, OT_BYTE);
4013
        gen_op_movl_T0_im(val);
4014
        gen_push_T0(s);
4015
        break;
4016
    case 0x8f: /* pop Ev */
4017
        if (CODE64(s)) {
4018
            ot = dflag ? OT_QUAD : OT_WORD;
4019
        } else {
4020
            ot = dflag + OT_WORD;
4021
        }
4022
        modrm = ldub_code(s->pc++);
4023
        mod = (modrm >> 6) & 3;
4024
        gen_pop_T0(s);
4025
        if (mod == 3) {
4026
            /* NOTE: order is important for pop %sp */
4027
            gen_pop_update(s);
4028
            rm = (modrm & 7) | REX_B(s);
4029
            gen_op_mov_reg_T0(ot, rm);
4030
        } else {
4031
            /* NOTE: order is important too for MMU exceptions */
4032
            s->popl_esp_hack = 1 << ot;
4033
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4034
            s->popl_esp_hack = 0;
4035
            gen_pop_update(s);
4036
        }
4037
        break;
4038
    case 0xc8: /* enter */
4039
        {
4040
            int level;
4041
            val = lduw_code(s->pc);
4042
            s->pc += 2;
4043
            level = ldub_code(s->pc++);
4044
            gen_enter(s, val, level);
4045
        }
4046
        break;
4047
    case 0xc9: /* leave */
4048
        /* XXX: exception not precise (ESP is updated before potential exception) */
4049
        if (CODE64(s)) {
4050
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4051
            gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4052
        } else if (s->ss32) {
4053
            gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4054
            gen_op_mov_reg_T0(OT_LONG, R_ESP);
4055
        } else {
4056
            gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4057
            gen_op_mov_reg_T0(OT_WORD, R_ESP);
4058
        }
4059
        gen_pop_T0(s);
4060
        if (CODE64(s)) {
4061
            ot = dflag ? OT_QUAD : OT_WORD;
4062
        } else {
4063
            ot = dflag + OT_WORD;
4064
        }
4065
        gen_op_mov_reg_T0(ot, R_EBP);
4066
        gen_pop_update(s);
4067
        break;
4068
    case 0x06: /* push es */
4069
    case 0x0e: /* push cs */
4070
    case 0x16: /* push ss */
4071
    case 0x1e: /* push ds */
4072
        if (CODE64(s))
4073
            goto illegal_op;
4074
        gen_op_movl_T0_seg(b >> 3);
4075
        gen_push_T0(s);
4076
        break;
4077
    case 0x1a0: /* push fs */
4078
    case 0x1a8: /* push gs */
4079
        gen_op_movl_T0_seg((b >> 3) & 7);
4080
        gen_push_T0(s);
4081
        break;
4082
    case 0x07: /* pop es */
4083
    case 0x17: /* pop ss */
4084
    case 0x1f: /* pop ds */
4085
        if (CODE64(s))
4086
            goto illegal_op;
4087
        reg = b >> 3;
4088
        gen_pop_T0(s);
4089
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4090
        gen_pop_update(s);
4091
        if (reg == R_SS) {
4092
            /* if reg == SS, inhibit interrupts/trace. */
4093
            /* If several instructions disable interrupts, only the
4094
               _first_ does it */
4095
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4096
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
4097
            s->tf = 0;
4098
        }
4099
        if (s->is_jmp) {
4100
            gen_jmp_im(s->pc - s->cs_base);
4101
            gen_eob(s);
4102
        }
4103
        break;
4104
    case 0x1a1: /* pop fs */
4105
    case 0x1a9: /* pop gs */
4106
        gen_pop_T0(s);
4107
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4108
        gen_pop_update(s);
4109
        if (s->is_jmp) {
4110
            gen_jmp_im(s->pc - s->cs_base);
4111
            gen_eob(s);
4112
        }
4113
        break;
4114

    
4115
        /**************************/
4116
        /* mov */
4117
    case 0x88:
4118
    case 0x89: /* mov Gv, Ev */
4119
        if ((b & 1) == 0)
4120
            ot = OT_BYTE;
4121
        else
4122
            ot = dflag + OT_WORD;
4123
        modrm = ldub_code(s->pc++);
4124
        reg = ((modrm >> 3) & 7) | rex_r;
4125

    
4126
        /* generate a generic store */
4127
        gen_ldst_modrm(s, modrm, ot, reg, 1);
4128
        break;
4129
    case 0xc6:
4130
    case 0xc7: /* mov Ev, Iv */
4131
        if ((b & 1) == 0)
4132
            ot = OT_BYTE;
4133
        else
4134
            ot = dflag + OT_WORD;
4135
        modrm = ldub_code(s->pc++);
4136
        mod = (modrm >> 6) & 3;
4137
        if (mod != 3) {
4138
            s->rip_offset = insn_const_size(ot);
4139
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4140
        }
4141
        val = insn_get(s, ot);
4142
        gen_op_movl_T0_im(val);
4143
        if (mod != 3)
4144
            gen_op_st_T0_A0(ot + s->mem_index);
4145
        else
4146
            gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4147
        break;
4148
    case 0x8a:
4149
    case 0x8b: /* mov Ev, Gv */
4150
        if ((b & 1) == 0)
4151
            ot = OT_BYTE;
4152
        else
4153
            ot = OT_WORD + dflag;
4154
        modrm = ldub_code(s->pc++);
4155
        reg = ((modrm >> 3) & 7) | rex_r;
4156

    
4157
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4158
        gen_op_mov_reg_T0(ot, reg);
4159
        break;
4160
    case 0x8e: /* mov seg, Gv */
4161
        modrm = ldub_code(s->pc++);
4162
        reg = (modrm >> 3) & 7;
4163
        if (reg >= 6 || reg == R_CS)
4164
            goto illegal_op;
4165
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4166
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4167
        if (reg == R_SS) {
4168
            /* if reg == SS, inhibit interrupts/trace */
4169
            /* If several instructions disable interrupts, only the
4170
               _first_ does it */
4171
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4172
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
4173
            s->tf = 0;
4174
        }
4175
        if (s->is_jmp) {
4176
            gen_jmp_im(s->pc - s->cs_base);
4177
            gen_eob(s);
4178
        }
4179
        break;
4180
    case 0x8c: /* mov Gv, seg */
4181
        modrm = ldub_code(s->pc++);
4182
        reg = (modrm >> 3) & 7;
4183
        mod = (modrm >> 6) & 3;
4184
        if (reg >= 6)
4185
            goto illegal_op;
4186
        gen_op_movl_T0_seg(reg);
4187
        if (mod == 3)
4188
            ot = OT_WORD + dflag;
4189
        else
4190
            ot = OT_WORD;
4191
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4192
        break;
4193

    
4194
    case 0x1b6: /* movzbS Gv, Eb */
4195
    case 0x1b7: /* movzwS Gv, Eb */
4196
    case 0x1be: /* movsbS Gv, Eb */
4197
    case 0x1bf: /* movswS Gv, Eb */
4198
        {
4199
            int d_ot;
4200
            /* d_ot is the size of destination */
4201
            d_ot = dflag + OT_WORD;
4202
            /* ot is the size of source */
4203
            ot = (b & 1) + OT_BYTE;
4204
            modrm = ldub_code(s->pc++);
4205
            reg = ((modrm >> 3) & 7) | rex_r;
4206
            mod = (modrm >> 6) & 3;
4207
            rm = (modrm & 7) | REX_B(s);
4208

    
4209
            if (mod == 3) {
4210
                gen_op_mov_TN_reg(ot, 0, rm);
4211
                switch(ot | (b & 8)) {
4212
                case OT_BYTE:
4213
                    gen_op_movzbl_T0_T0();
4214
                    break;
4215
                case OT_BYTE | 8:
4216
                    gen_op_movsbl_T0_T0();
4217
                    break;
4218
                case OT_WORD:
4219
                    gen_op_movzwl_T0_T0();
4220
                    break;
4221
                default:
4222
                case OT_WORD | 8:
4223
                    gen_op_movswl_T0_T0();
4224
                    break;
4225
                }
4226
                gen_op_mov_reg_T0(d_ot, reg);
4227
            } else {
4228
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4229
                if (b & 8) {
4230
                    gen_op_lds_T0_A0(ot + s->mem_index);
4231
                } else {
4232
                    gen_op_ldu_T0_A0(ot + s->mem_index);
4233
                }
4234
                gen_op_mov_reg_T0(d_ot, reg);
4235
            }
4236
        }
4237
        break;
4238

    
4239
    case 0x8d: /* lea */
4240
        ot = dflag + OT_WORD;
4241
        modrm = ldub_code(s->pc++);
4242
        mod = (modrm >> 6) & 3;
4243
        if (mod == 3)
4244
            goto illegal_op;
4245
        reg = ((modrm >> 3) & 7) | rex_r;
4246
        /* we must ensure that no segment is added */
4247
        s->override = -1;
4248
        val = s->addseg;
4249
        s->addseg = 0;
4250
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4251
        s->addseg = val;
4252
        gen_op_mov_reg_A0(ot - OT_WORD, reg);
4253
        break;
4254

    
4255
    case 0xa0: /* mov EAX, Ov */
4256
    case 0xa1:
4257
    case 0xa2: /* mov Ov, EAX */
4258
    case 0xa3:
4259
        {
4260
            target_ulong offset_addr;
4261

    
4262
            if ((b & 1) == 0)
4263
                ot = OT_BYTE;
4264
            else
4265
                ot = dflag + OT_WORD;
4266
#ifdef TARGET_X86_64
4267
            if (s->aflag == 2) {
4268
                offset_addr = ldq_code(s->pc);
4269
                s->pc += 8;
4270
                gen_op_movq_A0_im(offset_addr);
4271
            } else
4272
#endif
4273
            {
4274
                if (s->aflag) {
4275
                    offset_addr = insn_get(s, OT_LONG);
4276
                } else {
4277
                    offset_addr = insn_get(s, OT_WORD);
4278
                }
4279
                gen_op_movl_A0_im(offset_addr);
4280
            }
4281
            gen_add_A0_ds_seg(s);
4282
            if ((b & 2) == 0) {
4283
                gen_op_ld_T0_A0(ot + s->mem_index);
4284
                gen_op_mov_reg_T0(ot, R_EAX);
4285
            } else {
4286
                gen_op_mov_TN_reg(ot, 0, R_EAX);
4287
                gen_op_st_T0_A0(ot + s->mem_index);
4288
            }
4289
        }
4290
        break;
4291
    case 0xd7: /* xlat */
4292
#ifdef TARGET_X86_64
4293
        if (s->aflag == 2) {
4294
            gen_op_movq_A0_reg(R_EBX);
4295
            gen_op_addq_A0_AL();
4296
        } else
4297
#endif
4298
        {
4299
            gen_op_movl_A0_reg(R_EBX);
4300
            gen_op_addl_A0_AL();
4301
            if (s->aflag == 0)
4302
                gen_op_andl_A0_ffff();
4303
        }
4304
        gen_add_A0_ds_seg(s);
4305
        gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4306
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4307
        break;
4308
    case 0xb0 ... 0xb7: /* mov R, Ib */
4309
        val = insn_get(s, OT_BYTE);
4310
        gen_op_movl_T0_im(val);
4311
        gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4312
        break;
4313
    case 0xb8 ... 0xbf: /* mov R, Iv */
4314
#ifdef TARGET_X86_64
4315
        if (dflag == 2) {
4316
            uint64_t tmp;
4317
            /* 64 bit case */
4318
            tmp = ldq_code(s->pc);
4319
            s->pc += 8;
4320
            reg = (b & 7) | REX_B(s);
4321
            gen_movtl_T0_im(tmp);
4322
            gen_op_mov_reg_T0(OT_QUAD, reg);
4323
        } else
4324
#endif
4325
        {
4326
            ot = dflag ? OT_LONG : OT_WORD;
4327
            val = insn_get(s, ot);
4328
            reg = (b & 7) | REX_B(s);
4329
            gen_op_movl_T0_im(val);
4330
            gen_op_mov_reg_T0(ot, reg);
4331
        }
4332
        break;
4333

    
4334
    case 0x91 ... 0x97: /* xchg R, EAX */
4335
        ot = dflag + OT_WORD;
4336
        reg = (b & 7) | REX_B(s);
4337
        rm = R_EAX;
4338
        goto do_xchg_reg;
4339
    case 0x86:
4340
    case 0x87: /* xchg Ev, Gv */
4341
        if ((b & 1) == 0)
4342
            ot = OT_BYTE;
4343
        else
4344
            ot = dflag + OT_WORD;
4345
        modrm = ldub_code(s->pc++);
4346
        reg = ((modrm >> 3) & 7) | rex_r;
4347
        mod = (modrm >> 6) & 3;
4348
        if (mod == 3) {
4349
            rm = (modrm & 7) | REX_B(s);
4350
        do_xchg_reg:
4351
            gen_op_mov_TN_reg(ot, 0, reg);
4352
            gen_op_mov_TN_reg(ot, 1, rm);
4353
            gen_op_mov_reg_T0(ot, rm);
4354
            gen_op_mov_reg_T1(ot, reg);
4355
        } else {
4356
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4357
            gen_op_mov_TN_reg(ot, 0, reg);
4358
            /* for xchg, lock is implicit */
4359
            if (!(prefixes & PREFIX_LOCK))
4360
                tcg_gen_helper_0_0(helper_lock);
4361
            gen_op_ld_T1_A0(ot + s->mem_index);
4362
            gen_op_st_T0_A0(ot + s->mem_index);
4363
            if (!(prefixes & PREFIX_LOCK))
4364
                tcg_gen_helper_0_0(helper_unlock);
4365
            gen_op_mov_reg_T1(ot, reg);
4366
        }
4367
        break;
4368
    case 0xc4: /* les Gv */
4369
        if (CODE64(s))
4370
            goto illegal_op;
4371
        op = R_ES;
4372
        goto do_lxx;
4373
    case 0xc5: /* lds Gv */
4374
        if (CODE64(s))
4375
            goto illegal_op;
4376
        op = R_DS;
4377
        goto do_lxx;
4378
    case 0x1b2: /* lss Gv */
4379
        op = R_SS;
4380
        goto do_lxx;
4381
    case 0x1b4: /* lfs Gv */
4382
        op = R_FS;
4383
        goto do_lxx;
4384
    case 0x1b5: /* lgs Gv */
4385
        op = R_GS;
4386
    do_lxx:
4387
        ot = dflag ? OT_LONG : OT_WORD;
4388
        modrm = ldub_code(s->pc++);
4389
        reg = ((modrm >> 3) & 7) | rex_r;
4390
        mod = (modrm >> 6) & 3;
4391
        if (mod == 3)
4392
            goto illegal_op;
4393
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4394
        gen_op_ld_T1_A0(ot + s->mem_index);
4395
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4396
        /* load the segment first to handle exceptions properly */
4397
        gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4398
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4399
        /* then put the data */
4400
        gen_op_mov_reg_T1(ot, reg);
4401
        if (s->is_jmp) {
4402
            gen_jmp_im(s->pc - s->cs_base);
4403
            gen_eob(s);
4404
        }
4405
        break;
4406

    
4407
        /************************/
4408
        /* shifts */
4409
    case 0xc0:
4410
    case 0xc1:
4411
        /* shift Ev,Ib */
4412
        shift = 2;
4413
    grp2:
4414
        {
4415
            if ((b & 1) == 0)
4416
                ot = OT_BYTE;
4417
            else
4418
                ot = dflag + OT_WORD;
4419

    
4420
            modrm = ldub_code(s->pc++);
4421
            mod = (modrm >> 6) & 3;
4422
            op = (modrm >> 3) & 7;
4423

    
4424
            if (mod != 3) {
4425
                if (shift == 2) {
4426
                    s->rip_offset = 1;
4427
                }
4428
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4429
                opreg = OR_TMP0;
4430
            } else {
4431
                opreg = (modrm & 7) | REX_B(s);
4432
            }
4433

    
4434
            /* simpler op */
4435
            if (shift == 0) {
4436
                gen_shift(s, op, ot, opreg, OR_ECX);
4437
            } else {
4438
                if (shift == 2) {
4439
                    shift = ldub_code(s->pc++);
4440
                }
4441
                gen_shifti(s, op, ot, opreg, shift);
4442
            }
4443
        }
4444
        break;
4445
    case 0xd0:
4446
    case 0xd1:
4447
        /* shift Ev,1 */
4448
        shift = 1;
4449
        goto grp2;
4450
    case 0xd2:
4451
    case 0xd3:
4452
        /* shift Ev,cl */
4453
        shift = 0;
4454
        goto grp2;
4455

    
4456
    case 0x1a4: /* shld imm */
4457
        op = 0;
4458
        shift = 1;
4459
        goto do_shiftd;
4460
    case 0x1a5: /* shld cl */
4461
        op = 0;
4462
        shift = 0;
4463
        goto do_shiftd;
4464
    case 0x1ac: /* shrd imm */
4465
        op = 1;
4466
        shift = 1;
4467
        goto do_shiftd;
4468
    case 0x1ad: /* shrd cl */
4469
        op = 1;
4470
        shift = 0;
4471
    do_shiftd:
4472
        ot = dflag + OT_WORD;
4473
        modrm = ldub_code(s->pc++);
4474
        mod = (modrm >> 6) & 3;
4475
        rm = (modrm & 7) | REX_B(s);
4476
        reg = ((modrm >> 3) & 7) | rex_r;
4477

    
4478
        if (mod != 3) {
4479
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4480
            gen_op_ld_T0_A0(ot + s->mem_index);
4481
        } else {
4482
            gen_op_mov_TN_reg(ot, 0, rm);
4483
        }
4484
        gen_op_mov_TN_reg(ot, 1, reg);
4485

    
4486
        if (shift) {
4487
            val = ldub_code(s->pc++);
4488
            if (ot == OT_QUAD)
4489
                val &= 0x3f;
4490
            else
4491
                val &= 0x1f;
4492
            if (val) {
4493
                if (mod == 3)
4494
                    gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4495
                else
4496
                    gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4497
                if (op == 0 && ot != OT_WORD)
4498
                    s->cc_op = CC_OP_SHLB + ot;
4499
                else
4500
                    s->cc_op = CC_OP_SARB + ot;
4501
            }
4502
        } else {
4503
            if (s->cc_op != CC_OP_DYNAMIC)
4504
                gen_op_set_cc_op(s->cc_op);
4505
            if (mod == 3)
4506
                gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4507
            else
4508
                gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4509
            s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4510
        }
4511
        if (mod == 3) {
4512
            gen_op_mov_reg_T0(ot, rm);
4513
        }
4514
        break;
4515

    
4516
        /************************/
4517
        /* floats */
4518
    case 0xd8 ... 0xdf:
4519
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4520
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4521
            /* XXX: what to do if illegal op ? */
4522
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4523
            break;
4524
        }
4525
        modrm = ldub_code(s->pc++);
4526
        mod = (modrm >> 6) & 3;
4527
        rm = modrm & 7;
4528
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4529
        if (mod != 3) {
4530
            /* memory op */
4531
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4532
            switch(op) {
4533
            case 0x00 ... 0x07: /* fxxxs */
4534
            case 0x10 ... 0x17: /* fixxxl */
4535
            case 0x20 ... 0x27: /* fxxxl */
4536
            case 0x30 ... 0x37: /* fixxx */
4537
                {
4538
                    int op1;
4539
                    op1 = op & 7;
4540

    
4541
                    switch(op >> 4) {
4542
                    case 0:
4543
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4544
                        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4545
                        tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2);
4546
                        break;
4547
                    case 1:
4548
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4549
                        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4550
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2);
4551
                        break;
4552
                    case 2:
4553
                        tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0, 
4554
                                          (s->mem_index >> 2) - 1);
4555
                        tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1);
4556
                        break;
4557
                    case 3:
4558
                    default:
4559
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4560
                        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4561
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2);
4562
                        break;
4563
                    }
4564

    
4565
                    tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
4566
                    if (op1 == 3) {
4567
                        /* fcomp needs pop */
4568
                        tcg_gen_helper_0_0(helper_fpop);
4569
                    }
4570
                }
4571
                break;
4572
            case 0x08: /* flds */
4573
            case 0x0a: /* fsts */
4574
            case 0x0b: /* fstps */
4575
            case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4576
            case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4577
            case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4578
                switch(op & 7) {
4579
                case 0:
4580
                    switch(op >> 4) {
4581
                    case 0:
4582
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4583
                        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4584
                        tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2);
4585
                        break;
4586
                    case 1:
4587
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4588
                        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4589
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2);
4590
                        break;
4591
                    case 2:
4592
                        tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0, 
4593
                                          (s->mem_index >> 2) - 1);
4594
                        tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1);
4595
                        break;
4596
                    case 3:
4597
                    default:
4598
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4599
                        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4600
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2);
4601
                        break;
4602
                    }
4603
                    break;
4604
                case 1:
4605
                    /* XXX: the corresponding CPUID bit must be tested ! */
4606
                    switch(op >> 4) {
4607
                    case 1:
4608
                        tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2);
4609
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4610
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4611
                        break;
4612
                    case 2:
4613
                        tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1);
4614
                        tcg_gen_qemu_st64(cpu_tmp1, cpu_A0, 
4615
                                          (s->mem_index >> 2) - 1);
4616
                        break;
4617
                    case 3:
4618
                    default:
4619
                        tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2);
4620
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4621
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
4622
                        break;
4623
                    }
4624
                    tcg_gen_helper_0_0(helper_fpop);
4625
                    break;
4626
                default:
4627
                    switch(op >> 4) {
4628
                    case 0:
4629
                        tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2);
4630
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4631
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4632
                        break;
4633
                    case 1:
4634
                        tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2);
4635
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4636
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4637
                        break;
4638
                    case 2:
4639
                        tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1);
4640
                        tcg_gen_qemu_st64(cpu_tmp1, cpu_A0, 
4641
                                          (s->mem_index >> 2) - 1);
4642
                        break;
4643
                    case 3:
4644
                    default:
4645
                        tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2);
4646
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4647
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
4648
                        break;
4649
                    }
4650
                    if ((op & 7) == 3)
4651
                        tcg_gen_helper_0_0(helper_fpop);
4652
                    break;
4653
                }
4654
                break;
4655
            case 0x0c: /* fldenv mem */
4656
                if (s->cc_op != CC_OP_DYNAMIC)
4657
                    gen_op_set_cc_op(s->cc_op);
4658
                gen_jmp_im(pc_start - s->cs_base);
4659
                tcg_gen_helper_0_2(helper_fldenv, 
4660
                                   cpu_A0, tcg_const_i32(s->dflag));
4661
                break;
4662
            case 0x0d: /* fldcw mem */
4663
                gen_op_ld_T0_A0(OT_WORD + s->mem_index);
4664
                tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4665
                tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2);
4666
                break;
4667
            case 0x0e: /* fnstenv mem */
4668
                if (s->cc_op != CC_OP_DYNAMIC)
4669
                    gen_op_set_cc_op(s->cc_op);
4670
                gen_jmp_im(pc_start - s->cs_base);
4671
                tcg_gen_helper_0_2(helper_fstenv,
4672
                                   cpu_A0, tcg_const_i32(s->dflag));
4673
                break;
4674
            case 0x0f: /* fnstcw mem */
4675
                tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2);
4676
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4677
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
4678
                break;
4679
            case 0x1d: /* fldt mem */
4680
                if (s->cc_op != CC_OP_DYNAMIC)
4681
                    gen_op_set_cc_op(s->cc_op);
4682
                gen_jmp_im(pc_start - s->cs_base);
4683
                tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
4684
                break;
4685
            case 0x1f: /* fstpt mem */
4686
                if (s->cc_op != CC_OP_DYNAMIC)
4687
                    gen_op_set_cc_op(s->cc_op);
4688
                gen_jmp_im(pc_start - s->cs_base);
4689
                tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
4690
                tcg_gen_helper_0_0(helper_fpop);
4691
                break;
4692
            case 0x2c: /* frstor mem */
4693
                if (s->cc_op != CC_OP_DYNAMIC)
4694
                    gen_op_set_cc_op(s->cc_op);
4695
                gen_jmp_im(pc_start - s->cs_base);
4696
                tcg_gen_helper_0_2(helper_frstor,
4697
                                   cpu_A0, tcg_const_i32(s->dflag));
4698
                break;
4699
            case 0x2e: /* fnsave mem */
4700
                if (s->cc_op != CC_OP_DYNAMIC)
4701
                    gen_op_set_cc_op(s->cc_op);
4702
                gen_jmp_im(pc_start - s->cs_base);
4703
                tcg_gen_helper_0_2(helper_fsave,
4704
                                   cpu_A0, tcg_const_i32(s->dflag));
4705
                break;
4706
            case 0x2f: /* fnstsw mem */
4707
                tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2);
4708
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4709
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
4710
                break;
4711
            case 0x3c: /* fbld */
4712
                if (s->cc_op != CC_OP_DYNAMIC)
4713
                    gen_op_set_cc_op(s->cc_op);
4714
                gen_jmp_im(pc_start - s->cs_base);
4715
                tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
4716
                break;
4717
            case 0x3e: /* fbstp */
4718
                if (s->cc_op != CC_OP_DYNAMIC)
4719
                    gen_op_set_cc_op(s->cc_op);
4720
                gen_jmp_im(pc_start - s->cs_base);
4721
                tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
4722
                tcg_gen_helper_0_0(helper_fpop);
4723
                break;
4724
            case 0x3d: /* fildll */
4725
                tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0, 
4726
                                  (s->mem_index >> 2) - 1);
4727
                tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1);
4728
                break;
4729
            case 0x3f: /* fistpll */
4730
                tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1);
4731
                tcg_gen_qemu_st64(cpu_tmp1, cpu_A0, 
4732
                                  (s->mem_index >> 2) - 1);
4733
                tcg_gen_helper_0_0(helper_fpop);
4734
                break;
4735
            default:
4736
                goto illegal_op;
4737
            }
4738
        } else {
4739
            /* register float ops */
4740
            opreg = rm;
4741

    
4742
            switch(op) {
4743
            case 0x08: /* fld sti */
4744
                tcg_gen_helper_0_0(helper_fpush);
4745
                tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
4746
                break;
4747
            case 0x09: /* fxchg sti */
4748
            case 0x29: /* fxchg4 sti, undocumented op */
4749
            case 0x39: /* fxchg7 sti, undocumented op */
4750
                tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
4751
                break;
4752
            case 0x0a: /* grp d9/2 */
4753
                switch(rm) {
4754
                case 0: /* fnop */
4755
                    /* check exceptions (FreeBSD FPU probe) */
4756
                    if (s->cc_op != CC_OP_DYNAMIC)
4757
                        gen_op_set_cc_op(s->cc_op);
4758
                    gen_jmp_im(pc_start - s->cs_base);
4759
                    tcg_gen_helper_0_0(helper_fwait);
4760
                    break;
4761
                default:
4762
                    goto illegal_op;
4763
                }
4764
                break;
4765
            case 0x0c: /* grp d9/4 */
4766
                switch(rm) {
4767
                case 0: /* fchs */
4768
                    tcg_gen_helper_0_0(helper_fchs_ST0);
4769
                    break;
4770
                case 1: /* fabs */
4771
                    tcg_gen_helper_0_0(helper_fabs_ST0);
4772
                    break;
4773
                case 4: /* ftst */
4774
                    tcg_gen_helper_0_0(helper_fldz_FT0);
4775
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
4776
                    break;
4777
                case 5: /* fxam */
4778
                    tcg_gen_helper_0_0(helper_fxam_ST0);
4779
                    break;
4780
                default:
4781
                    goto illegal_op;
4782
                }
4783
                break;
4784
            case 0x0d: /* grp d9/5 */
4785
                {
4786
                    switch(rm) {
4787
                    case 0:
4788
                        tcg_gen_helper_0_0(helper_fpush);
4789
                        tcg_gen_helper_0_0(helper_fld1_ST0);
4790
                        break;
4791
                    case 1:
4792
                        tcg_gen_helper_0_0(helper_fpush);
4793
                        tcg_gen_helper_0_0(helper_fldl2t_ST0);
4794
                        break;
4795
                    case 2:
4796
                        tcg_gen_helper_0_0(helper_fpush);
4797
                        tcg_gen_helper_0_0(helper_fldl2e_ST0);
4798
                        break;
4799
                    case 3:
4800
                        tcg_gen_helper_0_0(helper_fpush);
4801
                        tcg_gen_helper_0_0(helper_fldpi_ST0);
4802
                        break;
4803
                    case 4:
4804
                        tcg_gen_helper_0_0(helper_fpush);
4805
                        tcg_gen_helper_0_0(helper_fldlg2_ST0);
4806
                        break;
4807
                    case 5:
4808
                        tcg_gen_helper_0_0(helper_fpush);
4809
                        tcg_gen_helper_0_0(helper_fldln2_ST0);
4810
                        break;
4811
                    case 6:
4812
                        tcg_gen_helper_0_0(helper_fpush);
4813
                        tcg_gen_helper_0_0(helper_fldz_ST0);
4814
                        break;
4815
                    default:
4816
                        goto illegal_op;
4817
                    }
4818
                }
4819
                break;
4820
            case 0x0e: /* grp d9/6 */
4821
                switch(rm) {
4822
                case 0: /* f2xm1 */
4823
                    tcg_gen_helper_0_0(helper_f2xm1);
4824
                    break;
4825
                case 1: /* fyl2x */
4826
                    tcg_gen_helper_0_0(helper_fyl2x);
4827
                    break;
4828
                case 2: /* fptan */
4829
                    tcg_gen_helper_0_0(helper_fptan);
4830
                    break;
4831
                case 3: /* fpatan */
4832
                    tcg_gen_helper_0_0(helper_fpatan);
4833
                    break;
4834
                case 4: /* fxtract */
4835
                    tcg_gen_helper_0_0(helper_fxtract);
4836
                    break;
4837
                case 5: /* fprem1 */
4838
                    tcg_gen_helper_0_0(helper_fprem1);
4839
                    break;
4840
                case 6: /* fdecstp */
4841
                    tcg_gen_helper_0_0(helper_fdecstp);
4842
                    break;
4843
                default:
4844
                case 7: /* fincstp */
4845
                    tcg_gen_helper_0_0(helper_fincstp);
4846
                    break;
4847
                }
4848
                break;
4849
            case 0x0f: /* grp d9/7 */
4850
                switch(rm) {
4851
                case 0: /* fprem */
4852
                    tcg_gen_helper_0_0(helper_fprem);
4853
                    break;
4854
                case 1: /* fyl2xp1 */
4855
                    tcg_gen_helper_0_0(helper_fyl2xp1);
4856
                    break;
4857
                case 2: /* fsqrt */
4858
                    tcg_gen_helper_0_0(helper_fsqrt);
4859
                    break;
4860
                case 3: /* fsincos */
4861
                    tcg_gen_helper_0_0(helper_fsincos);
4862
                    break;
4863
                case 5: /* fscale */
4864
                    tcg_gen_helper_0_0(helper_fscale);
4865
                    break;
4866
                case 4: /* frndint */
4867
                    tcg_gen_helper_0_0(helper_frndint);
4868
                    break;
4869
                case 6: /* fsin */
4870
                    tcg_gen_helper_0_0(helper_fsin);
4871
                    break;
4872
                default:
4873
                case 7: /* fcos */
4874
                    tcg_gen_helper_0_0(helper_fcos);
4875
                    break;
4876
                }
4877
                break;
4878
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4879
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4880
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4881
                {
4882
                    int op1;
4883

    
4884
                    op1 = op & 7;
4885
                    if (op >= 0x20) {
4886
                        tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
4887
                        if (op >= 0x30)
4888
                            tcg_gen_helper_0_0(helper_fpop);
4889
                    } else {
4890
                        tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4891
                        tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
4892
                    }
4893
                }
4894
                break;
4895
            case 0x02: /* fcom */
4896
            case 0x22: /* fcom2, undocumented op */
4897
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4898
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
4899
                break;
4900
            case 0x03: /* fcomp */
4901
            case 0x23: /* fcomp3, undocumented op */
4902
            case 0x32: /* fcomp5, undocumented op */
4903
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4904
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
4905
                tcg_gen_helper_0_0(helper_fpop);
4906
                break;
4907
            case 0x15: /* da/5 */
4908
                switch(rm) {
4909
                case 1: /* fucompp */
4910
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
4911
                    tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
4912
                    tcg_gen_helper_0_0(helper_fpop);
4913
                    tcg_gen_helper_0_0(helper_fpop);
4914
                    break;
4915
                default:
4916
                    goto illegal_op;
4917
                }
4918
                break;
4919
            case 0x1c:
4920
                switch(rm) {
4921
                case 0: /* feni (287 only, just do nop here) */
4922
                    break;
4923
                case 1: /* fdisi (287 only, just do nop here) */
4924
                    break;
4925
                case 2: /* fclex */
4926
                    tcg_gen_helper_0_0(helper_fclex);
4927
                    break;
4928
                case 3: /* fninit */
4929
                    tcg_gen_helper_0_0(helper_fninit);
4930
                    break;
4931
                case 4: /* fsetpm (287 only, just do nop here) */
4932
                    break;
4933
                default:
4934
                    goto illegal_op;
4935
                }
4936
                break;
4937
            case 0x1d: /* fucomi */
4938
                if (s->cc_op != CC_OP_DYNAMIC)
4939
                    gen_op_set_cc_op(s->cc_op);
4940
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4941
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
4942
                gen_op_fcomi_dummy();
4943
                s->cc_op = CC_OP_EFLAGS;
4944
                break;
4945
            case 0x1e: /* fcomi */
4946
                if (s->cc_op != CC_OP_DYNAMIC)
4947
                    gen_op_set_cc_op(s->cc_op);
4948
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4949
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
4950
                gen_op_fcomi_dummy();
4951
                s->cc_op = CC_OP_EFLAGS;
4952
                break;
4953
            case 0x28: /* ffree sti */
4954
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
4955
                break;
4956
            case 0x2a: /* fst sti */
4957
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
4958
                break;
4959
            case 0x2b: /* fstp sti */
4960
            case 0x0b: /* fstp1 sti, undocumented op */
4961
            case 0x3a: /* fstp8 sti, undocumented op */
4962
            case 0x3b: /* fstp9 sti, undocumented op */
4963
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
4964
                tcg_gen_helper_0_0(helper_fpop);
4965
                break;
4966
            case 0x2c: /* fucom st(i) */
4967
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4968
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
4969
                break;
4970
            case 0x2d: /* fucomp st(i) */
4971
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4972
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
4973
                tcg_gen_helper_0_0(helper_fpop);
4974
                break;
4975
            case 0x33: /* de/3 */
4976
                switch(rm) {
4977
                case 1: /* fcompp */
4978
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
4979
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
4980
                    tcg_gen_helper_0_0(helper_fpop);
4981
                    tcg_gen_helper_0_0(helper_fpop);
4982
                    break;
4983
                default:
4984
                    goto illegal_op;
4985
                }
4986
                break;
4987
            case 0x38: /* ffreep sti, undocumented op */
4988
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
4989
                tcg_gen_helper_0_0(helper_fpop);
4990
                break;
4991
            case 0x3c: /* df/4 */
4992
                switch(rm) {
4993
                case 0:
4994
                    tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2);
4995
                    tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4996
                    gen_op_mov_reg_T0(OT_WORD, R_EAX);
4997
                    break;
4998
                default:
4999
                    goto illegal_op;
5000
                }
5001
                break;
5002
            case 0x3d: /* fucomip */
5003
                if (s->cc_op != CC_OP_DYNAMIC)
5004
                    gen_op_set_cc_op(s->cc_op);
5005
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5006
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5007
                tcg_gen_helper_0_0(helper_fpop);
5008
                gen_op_fcomi_dummy();
5009
                s->cc_op = CC_OP_EFLAGS;
5010
                break;
5011
            case 0x3e: /* fcomip */
5012
                if (s->cc_op != CC_OP_DYNAMIC)
5013
                    gen_op_set_cc_op(s->cc_op);
5014
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5015
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5016
                tcg_gen_helper_0_0(helper_fpop);
5017
                gen_op_fcomi_dummy();
5018
                s->cc_op = CC_OP_EFLAGS;
5019
                break;
5020
            case 0x10 ... 0x13: /* fcmovxx */
5021
            case 0x18 ... 0x1b:
5022
                {
5023
                    int op1, l1;
5024
                    const static uint8_t fcmov_cc[8] = {
5025
                        (JCC_B << 1),
5026
                        (JCC_Z << 1),
5027
                        (JCC_BE << 1),
5028
                        (JCC_P << 1),
5029
                    };
5030
                    op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
5031
                    gen_setcc(s, op1);
5032
                    l1 = gen_new_label();
5033
                    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), l1);
5034
                    tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
5035
                    gen_set_label(l1);
5036
                }
5037
                break;
5038
            default:
5039
                goto illegal_op;
5040
            }
5041
        }
5042
        break;
5043
        /************************/
5044
        /* string ops */
5045

    
5046
    case 0xa4: /* movsS */
5047
    case 0xa5:
5048
        if ((b & 1) == 0)
5049
            ot = OT_BYTE;
5050
        else
5051
            ot = dflag + OT_WORD;
5052

    
5053
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5054
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5055
        } else {
5056
            gen_movs(s, ot);
5057
        }
5058
        break;
5059

    
5060
    case 0xaa: /* stosS */
5061
    case 0xab:
5062
        if ((b & 1) == 0)
5063
            ot = OT_BYTE;
5064
        else
5065
            ot = dflag + OT_WORD;
5066

    
5067
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5068
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5069
        } else {
5070
            gen_stos(s, ot);
5071
        }
5072
        break;
5073
    case 0xac: /* lodsS */
5074
    case 0xad:
5075
        if ((b & 1) == 0)
5076
            ot = OT_BYTE;
5077
        else
5078
            ot = dflag + OT_WORD;
5079
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5080
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5081
        } else {
5082
            gen_lods(s, ot);
5083
        }
5084
        break;
5085
    case 0xae: /* scasS */
5086
    case 0xaf:
5087
        if ((b & 1) == 0)
5088
            ot = OT_BYTE;
5089
        else
5090
            ot = dflag + OT_WORD;
5091
        if (prefixes & PREFIX_REPNZ) {
5092
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5093
        } else if (prefixes & PREFIX_REPZ) {
5094
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5095
        } else {
5096
            gen_scas(s, ot);
5097
            s->cc_op = CC_OP_SUBB + ot;
5098
        }
5099
        break;
5100

    
5101
    case 0xa6: /* cmpsS */
5102
    case 0xa7:
5103
        if ((b & 1) == 0)
5104
            ot = OT_BYTE;
5105
        else
5106
            ot = dflag + OT_WORD;
5107
        if (prefixes & PREFIX_REPNZ) {
5108
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5109
        } else if (prefixes & PREFIX_REPZ) {
5110
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5111
        } else {
5112
            gen_cmps(s, ot);
5113
            s->cc_op = CC_OP_SUBB + ot;
5114
        }
5115
        break;
5116
    case 0x6c: /* insS */
5117
    case 0x6d:
5118
        if ((b & 1) == 0)
5119
            ot = OT_BYTE;
5120
        else
5121
            ot = dflag ? OT_LONG : OT_WORD;
5122
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5123
        gen_op_andl_T0_ffff();
5124
        gen_check_io(s, ot, pc_start - s->cs_base, 
5125
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
5126
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5127
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5128
        } else {
5129
            gen_ins(s, ot);
5130
        }
5131
        break;
5132
    case 0x6e: /* outsS */
5133
    case 0x6f:
5134
        if ((b & 1) == 0)
5135
            ot = OT_BYTE;
5136
        else
5137
            ot = dflag ? OT_LONG : OT_WORD;
5138
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5139
        gen_op_andl_T0_ffff();
5140
        gen_check_io(s, ot, pc_start - s->cs_base,
5141
                     svm_is_rep(prefixes) | 4);
5142
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5143
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5144
        } else {
5145
            gen_outs(s, ot);
5146
        }
5147
        break;
5148

    
5149
        /************************/
5150
        /* port I/O */
5151

    
5152
    case 0xe4:
5153
    case 0xe5:
5154
        if ((b & 1) == 0)
5155
            ot = OT_BYTE;
5156
        else
5157
            ot = dflag ? OT_LONG : OT_WORD;
5158
        val = ldub_code(s->pc++);
5159
        gen_op_movl_T0_im(val);
5160
        gen_check_io(s, ot, pc_start - s->cs_base,
5161
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5162
        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
5163
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2);
5164
        gen_op_mov_reg_T1(ot, R_EAX);
5165
        break;
5166
    case 0xe6:
5167
    case 0xe7:
5168
        if ((b & 1) == 0)
5169
            ot = OT_BYTE;
5170
        else
5171
            ot = dflag ? OT_LONG : OT_WORD;
5172
        val = ldub_code(s->pc++);
5173
        gen_op_movl_T0_im(val);
5174
        gen_check_io(s, ot, pc_start - s->cs_base,
5175
                     svm_is_rep(prefixes));
5176
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5177

    
5178
        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
5179
        tcg_gen_andi_i32(cpu_tmp2, cpu_tmp2, 0xffff);
5180
        tcg_gen_trunc_tl_i32(cpu_tmp3, cpu_T[1]);
5181
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2, cpu_tmp3);
5182
        break;
5183
    case 0xec:
5184
    case 0xed:
5185
        if ((b & 1) == 0)
5186
            ot = OT_BYTE;
5187
        else
5188
            ot = dflag ? OT_LONG : OT_WORD;
5189
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5190
        gen_op_andl_T0_ffff();
5191
        gen_check_io(s, ot, pc_start - s->cs_base,
5192
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5193
        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
5194
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2);
5195
        gen_op_mov_reg_T1(ot, R_EAX);
5196
        break;
5197
    case 0xee:
5198
    case 0xef:
5199
        if ((b & 1) == 0)
5200
            ot = OT_BYTE;
5201
        else
5202
            ot = dflag ? OT_LONG : OT_WORD;
5203
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5204
        gen_op_andl_T0_ffff();
5205
        gen_check_io(s, ot, pc_start - s->cs_base,
5206
                     svm_is_rep(prefixes));
5207
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5208

    
5209
        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
5210
        tcg_gen_andi_i32(cpu_tmp2, cpu_tmp2, 0xffff);
5211
        tcg_gen_trunc_tl_i32(cpu_tmp3, cpu_T[1]);
5212
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2, cpu_tmp3);
5213
        break;
5214

    
5215
        /************************/
5216
        /* control */
5217
    case 0xc2: /* ret im */
5218
        val = ldsw_code(s->pc);
5219
        s->pc += 2;
5220
        gen_pop_T0(s);
5221
        if (CODE64(s) && s->dflag)
5222
            s->dflag = 2;
5223
        gen_stack_update(s, val + (2 << s->dflag));
5224
        if (s->dflag == 0)
5225
            gen_op_andl_T0_ffff();
5226
        gen_op_jmp_T0();
5227
        gen_eob(s);
5228
        break;
5229
    case 0xc3: /* ret */
5230
        gen_pop_T0(s);
5231
        gen_pop_update(s);
5232
        if (s->dflag == 0)
5233
            gen_op_andl_T0_ffff();
5234
        gen_op_jmp_T0();
5235
        gen_eob(s);
5236
        break;
5237
    case 0xca: /* lret im */
5238
        val = ldsw_code(s->pc);
5239
        s->pc += 2;
5240
    do_lret:
5241
        if (s->pe && !s->vm86) {
5242
            if (s->cc_op != CC_OP_DYNAMIC)
5243
                gen_op_set_cc_op(s->cc_op);
5244
            gen_jmp_im(pc_start - s->cs_base);
5245
            tcg_gen_helper_0_2(helper_lret_protected,
5246
                               tcg_const_i32(s->dflag), 
5247
                               tcg_const_i32(val));
5248
        } else {
5249
            gen_stack_A0(s);
5250
            /* pop offset */
5251
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5252
            if (s->dflag == 0)
5253
                gen_op_andl_T0_ffff();
5254
            /* NOTE: keeping EIP updated is not a problem in case of
5255
               exception */
5256
            gen_op_jmp_T0();
5257
            /* pop selector */
5258
            gen_op_addl_A0_im(2 << s->dflag);
5259
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5260
            gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5261
            /* add stack offset */
5262
            gen_stack_update(s, val + (4 << s->dflag));
5263
        }
5264
        gen_eob(s);
5265
        break;
5266
    case 0xcb: /* lret */
5267
        val = 0;
5268
        goto do_lret;
5269
    case 0xcf: /* iret */
5270
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5271
            break;
5272
        if (!s->pe) {
5273
            /* real mode */
5274
            tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5275
            s->cc_op = CC_OP_EFLAGS;
5276
        } else if (s->vm86) {
5277
            if (s->iopl != 3) {
5278
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5279
            } else {
5280
                tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5281
                s->cc_op = CC_OP_EFLAGS;
5282
            }
5283
        } else {
5284
            if (s->cc_op != CC_OP_DYNAMIC)
5285
                gen_op_set_cc_op(s->cc_op);
5286
            gen_jmp_im(pc_start - s->cs_base);
5287
            tcg_gen_helper_0_2(helper_iret_protected,
5288
                               tcg_const_i32(s->dflag), 
5289
                               tcg_const_i32(s->pc - s->cs_base));
5290
            s->cc_op = CC_OP_EFLAGS;
5291
        }
5292
        gen_eob(s);
5293
        break;
5294
    case 0xe8: /* call im */
5295
        {
5296
            if (dflag)
5297
                tval = (int32_t)insn_get(s, OT_LONG);
5298
            else
5299
                tval = (int16_t)insn_get(s, OT_WORD);
5300
            next_eip = s->pc - s->cs_base;
5301
            tval += next_eip;
5302
            if (s->dflag == 0)
5303
                tval &= 0xffff;
5304
            gen_movtl_T0_im(next_eip);
5305
            gen_push_T0(s);
5306
            gen_jmp(s, tval);
5307
        }
5308
        break;
5309
    case 0x9a: /* lcall im */
5310
        {
5311
            unsigned int selector, offset;
5312

    
5313
            if (CODE64(s))
5314
                goto illegal_op;
5315
            ot = dflag ? OT_LONG : OT_WORD;
5316
            offset = insn_get(s, ot);
5317
            selector = insn_get(s, OT_WORD);
5318

    
5319
            gen_op_movl_T0_im(selector);
5320
            gen_op_movl_T1_imu(offset);
5321
        }
5322
        goto do_lcall;
5323
    case 0xe9: /* jmp im */
5324
        if (dflag)
5325
            tval = (int32_t)insn_get(s, OT_LONG);
5326
        else
5327
            tval = (int16_t)insn_get(s, OT_WORD);
5328
        tval += s->pc - s->cs_base;
5329
        if (s->dflag == 0)
5330
            tval &= 0xffff;
5331
        gen_jmp(s, tval);
5332
        break;
5333
    case 0xea: /* ljmp im */
5334
        {
5335
            unsigned int selector, offset;
5336

    
5337
            if (CODE64(s))
5338
                goto illegal_op;
5339
            ot = dflag ? OT_LONG : OT_WORD;
5340
            offset = insn_get(s, ot);
5341
            selector = insn_get(s, OT_WORD);
5342

    
5343
            gen_op_movl_T0_im(selector);
5344
            gen_op_movl_T1_imu(offset);
5345
        }
5346
        goto do_ljmp;
5347
    case 0xeb: /* jmp Jb */
5348
        tval = (int8_t)insn_get(s, OT_BYTE);
5349
        tval += s->pc - s->cs_base;
5350
        if (s->dflag == 0)
5351
            tval &= 0xffff;
5352
        gen_jmp(s, tval);
5353
        break;
5354
    case 0x70 ... 0x7f: /* jcc Jb */
5355
        tval = (int8_t)insn_get(s, OT_BYTE);
5356
        goto do_jcc;
5357
    case 0x180 ... 0x18f: /* jcc Jv */
5358
        if (dflag) {
5359
            tval = (int32_t)insn_get(s, OT_LONG);
5360
        } else {
5361
            tval = (int16_t)insn_get(s, OT_WORD);
5362
        }
5363
    do_jcc:
5364
        next_eip = s->pc - s->cs_base;
5365
        tval += next_eip;
5366
        if (s->dflag == 0)
5367
            tval &= 0xffff;
5368
        gen_jcc(s, b, tval, next_eip);
5369
        break;
5370

    
5371
    case 0x190 ... 0x19f: /* setcc Gv */
5372
        modrm = ldub_code(s->pc++);
5373
        gen_setcc(s, b);
5374
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5375
        break;
5376
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
5377
        ot = dflag + OT_WORD;
5378
        modrm = ldub_code(s->pc++);
5379
        reg = ((modrm >> 3) & 7) | rex_r;
5380
        mod = (modrm >> 6) & 3;
5381
        gen_setcc(s, b);
5382
        if (mod != 3) {
5383
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5384
            gen_op_ld_T1_A0(ot + s->mem_index);
5385
        } else {
5386
            rm = (modrm & 7) | REX_B(s);
5387
            gen_op_mov_TN_reg(ot, 1, rm);
5388
        }
5389
        gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5390
        break;
5391

    
5392
        /************************/
5393
        /* flags */
5394
    case 0x9c: /* pushf */
5395
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5396
            break;
5397
        if (s->vm86 && s->iopl != 3) {
5398
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5399
        } else {
5400
            if (s->cc_op != CC_OP_DYNAMIC)
5401
                gen_op_set_cc_op(s->cc_op);
5402
            gen_op_movl_T0_eflags();
5403
            gen_push_T0(s);
5404
        }
5405
        break;
5406
    case 0x9d: /* popf */
5407
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5408
            break;
5409
        if (s->vm86 && s->iopl != 3) {
5410
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5411
        } else {
5412
            gen_pop_T0(s);
5413
            if (s->cpl == 0) {
5414
                if (s->dflag) {
5415
                    gen_op_movl_eflags_T0_cpl0();
5416
                } else {
5417
                    gen_op_movw_eflags_T0_cpl0();
5418
                }
5419
            } else {
5420
                if (s->cpl <= s->iopl) {
5421
                    if (s->dflag) {
5422
                        gen_op_movl_eflags_T0_io();
5423
                    } else {
5424
                        gen_op_movw_eflags_T0_io();
5425
                    }
5426
                } else {
5427
                    if (s->dflag) {
5428
                        gen_op_movl_eflags_T0();
5429
                    } else {
5430
                        gen_op_movw_eflags_T0();
5431
                    }
5432
                }
5433
            }
5434
            gen_pop_update(s);
5435
            s->cc_op = CC_OP_EFLAGS;
5436
            /* abort translation because TF flag may change */
5437
            gen_jmp_im(s->pc - s->cs_base);
5438
            gen_eob(s);
5439
        }
5440
        break;
5441
    case 0x9e: /* sahf */
5442
        if (CODE64(s))
5443
            goto illegal_op;
5444
        gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5445
        if (s->cc_op != CC_OP_DYNAMIC)
5446
            gen_op_set_cc_op(s->cc_op);
5447
        gen_op_movb_eflags_T0();
5448
        s->cc_op = CC_OP_EFLAGS;
5449
        break;
5450
    case 0x9f: /* lahf */
5451
        if (CODE64(s))
5452
            goto illegal_op;
5453
        if (s->cc_op != CC_OP_DYNAMIC)
5454
            gen_op_set_cc_op(s->cc_op);
5455
        gen_op_movl_T0_eflags();
5456
        gen_op_mov_reg_T0(OT_BYTE, R_AH);
5457
        break;
5458
    case 0xf5: /* cmc */
5459
        if (s->cc_op != CC_OP_DYNAMIC)
5460
            gen_op_set_cc_op(s->cc_op);
5461
        gen_op_cmc();
5462
        s->cc_op = CC_OP_EFLAGS;
5463
        break;
5464
    case 0xf8: /* clc */
5465
        if (s->cc_op != CC_OP_DYNAMIC)
5466
            gen_op_set_cc_op(s->cc_op);
5467
        gen_op_clc();
5468
        s->cc_op = CC_OP_EFLAGS;
5469
        break;
5470
    case 0xf9: /* stc */
5471
        if (s->cc_op != CC_OP_DYNAMIC)
5472
            gen_op_set_cc_op(s->cc_op);
5473
        gen_op_stc();
5474
        s->cc_op = CC_OP_EFLAGS;
5475
        break;
5476
    case 0xfc: /* cld */
5477
        tcg_gen_movi_i32(cpu_tmp2, 1);
5478
        tcg_gen_st_i32(cpu_tmp2, cpu_env, offsetof(CPUState, df));
5479
        break;
5480
    case 0xfd: /* std */
5481
        tcg_gen_movi_i32(cpu_tmp2, -1);
5482
        tcg_gen_st_i32(cpu_tmp2, cpu_env, offsetof(CPUState, df));
5483
        break;
5484

    
5485
        /************************/
5486
        /* bit operations */
5487
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
5488
        ot = dflag + OT_WORD;
5489
        modrm = ldub_code(s->pc++);
5490
        op = (modrm >> 3) & 7;
5491
        mod = (modrm >> 6) & 3;
5492
        rm = (modrm & 7) | REX_B(s);
5493
        if (mod != 3) {
5494
            s->rip_offset = 1;
5495
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5496
            gen_op_ld_T0_A0(ot + s->mem_index);
5497
        } else {
5498
            gen_op_mov_TN_reg(ot, 0, rm);
5499
        }
5500
        /* load shift */
5501
        val = ldub_code(s->pc++);
5502
        gen_op_movl_T1_im(val);
5503
        if (op < 4)
5504
            goto illegal_op;
5505
        op -= 4;
5506
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5507
        s->cc_op = CC_OP_SARB + ot;
5508
        if (op != 0) {
5509
            if (mod != 3)
5510
                gen_op_st_T0_A0(ot + s->mem_index);
5511
            else
5512
                gen_op_mov_reg_T0(ot, rm);
5513
            gen_op_update_bt_cc();
5514
        }
5515
        break;
5516
    case 0x1a3: /* bt Gv, Ev */
5517
        op = 0;
5518
        goto do_btx;
5519
    case 0x1ab: /* bts */
5520
        op = 1;
5521
        goto do_btx;
5522
    case 0x1b3: /* btr */
5523
        op = 2;
5524
        goto do_btx;
5525
    case 0x1bb: /* btc */
5526
        op = 3;
5527
    do_btx:
5528
        ot = dflag + OT_WORD;
5529
        modrm = ldub_code(s->pc++);
5530
        reg = ((modrm >> 3) & 7) | rex_r;
5531
        mod = (modrm >> 6) & 3;
5532
        rm = (modrm & 7) | REX_B(s);
5533
        gen_op_mov_TN_reg(OT_LONG, 1, reg);
5534
        if (mod != 3) {
5535
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5536
            /* specific case: we need to add a displacement */
5537
            gen_op_add_bit_A0_T1[ot - OT_WORD]();
5538
            gen_op_ld_T0_A0(ot + s->mem_index);
5539
        } else {
5540
            gen_op_mov_TN_reg(ot, 0, rm);
5541
        }
5542
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5543
        s->cc_op = CC_OP_SARB + ot;
5544
        if (op != 0) {
5545
            if (mod != 3)
5546
                gen_op_st_T0_A0(ot + s->mem_index);
5547
            else
5548
                gen_op_mov_reg_T0(ot, rm);
5549
            gen_op_update_bt_cc();
5550
        }
5551
        break;
5552
    case 0x1bc: /* bsf */
5553
    case 0x1bd: /* bsr */
5554
        ot = dflag + OT_WORD;
5555
        modrm = ldub_code(s->pc++);
5556
        reg = ((modrm >> 3) & 7) | rex_r;
5557
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5558
        /* NOTE: in order to handle the 0 case, we must load the
5559
           result. It could be optimized with a generated jump */
5560
        gen_op_mov_TN_reg(ot, 1, reg);
5561
        gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5562
        gen_op_mov_reg_T1(ot, reg);
5563
        s->cc_op = CC_OP_LOGICB + ot;
5564
        break;
5565
        /************************/
5566
        /* bcd */
5567
    case 0x27: /* daa */
5568
        if (CODE64(s))
5569
            goto illegal_op;
5570
        if (s->cc_op != CC_OP_DYNAMIC)
5571
            gen_op_set_cc_op(s->cc_op);
5572
        gen_op_daa();
5573
        s->cc_op = CC_OP_EFLAGS;
5574
        break;
5575
    case 0x2f: /* das */
5576
        if (CODE64(s))
5577
            goto illegal_op;
5578
        if (s->cc_op != CC_OP_DYNAMIC)
5579
            gen_op_set_cc_op(s->cc_op);
5580
        gen_op_das();
5581
        s->cc_op = CC_OP_EFLAGS;
5582
        break;
5583
    case 0x37: /* aaa */
5584
        if (CODE64(s))
5585
            goto illegal_op;
5586
        if (s->cc_op != CC_OP_DYNAMIC)
5587
            gen_op_set_cc_op(s->cc_op);
5588
        gen_op_aaa();
5589
        s->cc_op = CC_OP_EFLAGS;
5590
        break;
5591
    case 0x3f: /* aas */
5592
        if (CODE64(s))
5593
            goto illegal_op;
5594
        if (s->cc_op != CC_OP_DYNAMIC)
5595
            gen_op_set_cc_op(s->cc_op);
5596
        gen_op_aas();
5597
        s->cc_op = CC_OP_EFLAGS;
5598
        break;
5599
    case 0xd4: /* aam */
5600
        if (CODE64(s))
5601
            goto illegal_op;
5602
        val = ldub_code(s->pc++);
5603
        if (val == 0) {
5604
            gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5605
        } else {
5606
            gen_op_aam(val);
5607
            s->cc_op = CC_OP_LOGICB;
5608
        }
5609
        break;
5610
    case 0xd5: /* aad */
5611
        if (CODE64(s))
5612
            goto illegal_op;
5613
        val = ldub_code(s->pc++);
5614
        gen_op_aad(val);
5615
        s->cc_op = CC_OP_LOGICB;
5616
        break;
5617
        /************************/
5618
        /* misc */
5619
    case 0x90: /* nop */
5620
        /* XXX: xchg + rex handling */
5621
        /* XXX: correct lock test for all insn */
5622
        if (prefixes & PREFIX_LOCK)
5623
            goto illegal_op;
5624
        if (prefixes & PREFIX_REPZ) {
5625
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
5626
        }
5627
        break;
5628
    case 0x9b: /* fwait */
5629
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5630
            (HF_MP_MASK | HF_TS_MASK)) {
5631
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5632
        } else {
5633
            if (s->cc_op != CC_OP_DYNAMIC)
5634
                gen_op_set_cc_op(s->cc_op);
5635
            gen_jmp_im(pc_start - s->cs_base);
5636
            tcg_gen_helper_0_0(helper_fwait);
5637
        }
5638
        break;
5639
    case 0xcc: /* int3 */
5640
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5641
            break;
5642
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5643
        break;
5644
    case 0xcd: /* int N */
5645
        val = ldub_code(s->pc++);
5646
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5647
            break;
5648
        if (s->vm86 && s->iopl != 3) {
5649
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5650
        } else {
5651
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5652
        }
5653
        break;
5654
    case 0xce: /* into */
5655
        if (CODE64(s))
5656
            goto illegal_op;
5657
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5658
            break;
5659
        if (s->cc_op != CC_OP_DYNAMIC)
5660
            gen_op_set_cc_op(s->cc_op);
5661
        gen_jmp_im(pc_start - s->cs_base);
5662
        gen_op_into(s->pc - pc_start);
5663
        break;
5664
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
5665
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
5666
            break;
5667
#if 1
5668
        gen_debug(s, pc_start - s->cs_base);
5669
#else
5670
        /* start debug */
5671
        tb_flush(cpu_single_env);
5672
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5673
#endif
5674
        break;
5675
    case 0xfa: /* cli */
5676
        if (!s->vm86) {
5677
            if (s->cpl <= s->iopl) {
5678
                tcg_gen_helper_0_0(helper_cli);
5679
            } else {
5680
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5681
            }
5682
        } else {
5683
            if (s->iopl == 3) {
5684
                tcg_gen_helper_0_0(helper_cli);
5685
            } else {
5686
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5687
            }
5688
        }
5689
        break;
5690
    case 0xfb: /* sti */
5691
        if (!s->vm86) {
5692
            if (s->cpl <= s->iopl) {
5693
            gen_sti:
5694
                tcg_gen_helper_0_0(helper_sti);
5695
                /* interruptions are enabled only the first insn after sti */
5696
                /* If several instructions disable interrupts, only the
5697
                   _first_ does it */
5698
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5699
                    tcg_gen_helper_0_0(helper_set_inhibit_irq);
5700
                /* give a chance to handle pending irqs */
5701
                gen_jmp_im(s->pc - s->cs_base);
5702
                gen_eob(s);
5703
            } else {
5704
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5705
            }
5706
        } else {
5707
            if (s->iopl == 3) {
5708
                goto gen_sti;
5709
            } else {
5710
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5711
            }
5712
        }
5713
        break;
5714
    case 0x62: /* bound */
5715
        if (CODE64(s))
5716
            goto illegal_op;
5717
        ot = dflag ? OT_LONG : OT_WORD;
5718
        modrm = ldub_code(s->pc++);
5719
        reg = (modrm >> 3) & 7;
5720
        mod = (modrm >> 6) & 3;
5721
        if (mod == 3)
5722
            goto illegal_op;
5723
        gen_op_mov_TN_reg(ot, 0, reg);
5724
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5725
        gen_jmp_im(pc_start - s->cs_base);
5726
        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
5727
        if (ot == OT_WORD)
5728
            tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2);
5729
        else
5730
            tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2);
5731
        break;
5732
    case 0x1c8 ... 0x1cf: /* bswap reg */
5733
        reg = (b & 7) | REX_B(s);
5734
#ifdef TARGET_X86_64
5735
        if (dflag == 2) {
5736
            gen_op_mov_TN_reg(OT_QUAD, 0, reg);
5737
            tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
5738
            gen_op_mov_reg_T0(OT_QUAD, reg);
5739
        } else
5740
        {
5741
            TCGv tmp0;
5742
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
5743
            
5744
            tmp0 = tcg_temp_new(TCG_TYPE_I32);
5745
            tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
5746
            tcg_gen_bswap_i32(tmp0, tmp0);
5747
            tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
5748
            gen_op_mov_reg_T0(OT_LONG, reg);
5749
        }
5750
#else
5751
        {
5752
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
5753
            tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
5754
            gen_op_mov_reg_T0(OT_LONG, reg);
5755
        }
5756
#endif
5757
        break;
5758
    case 0xd6: /* salc */
5759
        if (CODE64(s))
5760
            goto illegal_op;
5761
        if (s->cc_op != CC_OP_DYNAMIC)
5762
            gen_op_set_cc_op(s->cc_op);
5763
        gen_op_salc();
5764
        break;
5765
    case 0xe0: /* loopnz */
5766
    case 0xe1: /* loopz */
5767
        if (s->cc_op != CC_OP_DYNAMIC)
5768
            gen_op_set_cc_op(s->cc_op);
5769
        /* FALL THRU */
5770
    case 0xe2: /* loop */
5771
    case 0xe3: /* jecxz */
5772
        {
5773
            int l1, l2;
5774

    
5775
            tval = (int8_t)insn_get(s, OT_BYTE);
5776
            next_eip = s->pc - s->cs_base;
5777
            tval += next_eip;
5778
            if (s->dflag == 0)
5779
                tval &= 0xffff;
5780

    
5781
            l1 = gen_new_label();
5782
            l2 = gen_new_label();
5783
            b &= 3;
5784
            if (b == 3) {
5785
                gen_op_jz_ecx[s->aflag](l1);
5786
            } else {
5787
                gen_op_dec_ECX[s->aflag]();
5788
                if (b <= 1)
5789
                    gen_op_mov_T0_cc();
5790
                gen_op_loop[s->aflag][b](l1);
5791
            }
5792

    
5793
            gen_jmp_im(next_eip);
5794
            gen_op_jmp_label(l2);
5795
            gen_set_label(l1);
5796
            gen_jmp_im(tval);
5797
            gen_set_label(l2);
5798
            gen_eob(s);
5799
        }
5800
        break;
5801
    case 0x130: /* wrmsr */
5802
    case 0x132: /* rdmsr */
5803
        if (s->cpl != 0) {
5804
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5805
        } else {
5806
            int retval = 0;
5807
            if (b & 2) {
5808
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
5809
                tcg_gen_helper_0_0(helper_rdmsr);
5810
            } else {
5811
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
5812
                tcg_gen_helper_0_0(helper_wrmsr);
5813
            }
5814
            if(retval)
5815
                gen_eob(s);
5816
        }
5817
        break;
5818
    case 0x131: /* rdtsc */
5819
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
5820
            break;
5821
        gen_jmp_im(pc_start - s->cs_base);
5822
        tcg_gen_helper_0_0(helper_rdtsc);
5823
        break;
5824
    case 0x133: /* rdpmc */
5825
        gen_jmp_im(pc_start - s->cs_base);
5826
        tcg_gen_helper_0_0(helper_rdpmc);
5827
        break;
5828
    case 0x134: /* sysenter */
5829
        if (CODE64(s))
5830
            goto illegal_op;
5831
        if (!s->pe) {
5832
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5833
        } else {
5834
            if (s->cc_op != CC_OP_DYNAMIC) {
5835
                gen_op_set_cc_op(s->cc_op);
5836
                s->cc_op = CC_OP_DYNAMIC;
5837
            }
5838
            gen_jmp_im(pc_start - s->cs_base);
5839
            tcg_gen_helper_0_0(helper_sysenter);
5840
            gen_eob(s);
5841
        }
5842
        break;
5843
    case 0x135: /* sysexit */
5844
        if (CODE64(s))
5845
            goto illegal_op;
5846
        if (!s->pe) {
5847
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5848
        } else {
5849
            if (s->cc_op != CC_OP_DYNAMIC) {
5850
                gen_op_set_cc_op(s->cc_op);
5851
                s->cc_op = CC_OP_DYNAMIC;
5852
            }
5853
            gen_jmp_im(pc_start - s->cs_base);
5854
            tcg_gen_helper_0_0(helper_sysexit);
5855
            gen_eob(s);
5856
        }
5857
        break;
5858
#ifdef TARGET_X86_64
5859
    case 0x105: /* syscall */
5860
        /* XXX: is it usable in real mode ? */
5861
        if (s->cc_op != CC_OP_DYNAMIC) {
5862
            gen_op_set_cc_op(s->cc_op);
5863
            s->cc_op = CC_OP_DYNAMIC;
5864
        }
5865
        gen_jmp_im(pc_start - s->cs_base);
5866
        tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
5867
        gen_eob(s);
5868
        break;
5869
    case 0x107: /* sysret */
5870
        if (!s->pe) {
5871
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5872
        } else {
5873
            if (s->cc_op != CC_OP_DYNAMIC) {
5874
                gen_op_set_cc_op(s->cc_op);
5875
                s->cc_op = CC_OP_DYNAMIC;
5876
            }
5877
            gen_jmp_im(pc_start - s->cs_base);
5878
            tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
5879
            /* condition codes are modified only in long mode */
5880
            if (s->lma)
5881
                s->cc_op = CC_OP_EFLAGS;
5882
            gen_eob(s);
5883
        }
5884
        break;
5885
#endif
5886
    case 0x1a2: /* cpuid */
5887
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
5888
            break;
5889
        tcg_gen_helper_0_0(helper_cpuid);
5890
        break;
5891
    case 0xf4: /* hlt */
5892
        if (s->cpl != 0) {
5893
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5894
        } else {
5895
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
5896
                break;
5897
            if (s->cc_op != CC_OP_DYNAMIC)
5898
                gen_op_set_cc_op(s->cc_op);
5899
            gen_jmp_im(s->pc - s->cs_base);
5900
            tcg_gen_helper_0_0(helper_hlt);
5901
            s->is_jmp = 3;
5902
        }
5903
        break;
5904
    case 0x100:
5905
        modrm = ldub_code(s->pc++);
5906
        mod = (modrm >> 6) & 3;
5907
        op = (modrm >> 3) & 7;
5908
        switch(op) {
5909
        case 0: /* sldt */
5910
            if (!s->pe || s->vm86)
5911
                goto illegal_op;
5912
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
5913
                break;
5914
            gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5915
            ot = OT_WORD;
5916
            if (mod == 3)
5917
                ot += s->dflag;
5918
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5919
            break;
5920
        case 2: /* lldt */
5921
            if (!s->pe || s->vm86)
5922
                goto illegal_op;
5923
            if (s->cpl != 0) {
5924
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5925
            } else {
5926
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
5927
                    break;
5928
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5929
                gen_jmp_im(pc_start - s->cs_base);
5930
                tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
5931
                tcg_gen_helper_0_1(helper_lldt, cpu_tmp2);
5932
            }
5933
            break;
5934
        case 1: /* str */
5935
            if (!s->pe || s->vm86)
5936
                goto illegal_op;
5937
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
5938
                break;
5939
            gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5940
            ot = OT_WORD;
5941
            if (mod == 3)
5942
                ot += s->dflag;
5943
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5944
            break;
5945
        case 3: /* ltr */
5946
            if (!s->pe || s->vm86)
5947
                goto illegal_op;
5948
            if (s->cpl != 0) {
5949
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5950
            } else {
5951
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
5952
                    break;
5953
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5954
                gen_jmp_im(pc_start - s->cs_base);
5955
                tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
5956
                tcg_gen_helper_0_1(helper_ltr, cpu_tmp2);
5957
            }
5958
            break;
5959
        case 4: /* verr */
5960
        case 5: /* verw */
5961
            if (!s->pe || s->vm86)
5962
                goto illegal_op;
5963
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5964
            if (s->cc_op != CC_OP_DYNAMIC)
5965
                gen_op_set_cc_op(s->cc_op);
5966
            if (op == 4)
5967
                gen_op_verr();
5968
            else
5969
                gen_op_verw();
5970
            s->cc_op = CC_OP_EFLAGS;
5971
            break;
5972
        default:
5973
            goto illegal_op;
5974
        }
5975
        break;
5976
    case 0x101:
5977
        modrm = ldub_code(s->pc++);
5978
        mod = (modrm >> 6) & 3;
5979
        op = (modrm >> 3) & 7;
5980
        rm = modrm & 7;
5981
        switch(op) {
5982
        case 0: /* sgdt */
5983
            if (mod == 3)
5984
                goto illegal_op;
5985
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
5986
                break;
5987
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5988
            gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5989
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
5990
            gen_add_A0_im(s, 2);
5991
            gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5992
            if (!s->dflag)
5993
                gen_op_andl_T0_im(0xffffff);
5994
            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5995
            break;
5996
        case 1:
5997
            if (mod == 3) {
5998
                switch (rm) {
5999
                case 0: /* monitor */
6000
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6001
                        s->cpl != 0)
6002
                        goto illegal_op;
6003
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
6004
                        break;
6005
                    gen_jmp_im(pc_start - s->cs_base);
6006
#ifdef TARGET_X86_64
6007
                    if (s->aflag == 2) {
6008
                        gen_op_movq_A0_reg(R_EBX);
6009
                        gen_op_addq_A0_AL();
6010
                    } else
6011
#endif
6012
                    {
6013
                        gen_op_movl_A0_reg(R_EBX);
6014
                        gen_op_addl_A0_AL();
6015
                        if (s->aflag == 0)
6016
                            gen_op_andl_A0_ffff();
6017
                    }
6018
                    gen_add_A0_ds_seg(s);
6019
                    tcg_gen_helper_0_1(helper_monitor, cpu_A0);
6020
                    break;
6021
                case 1: /* mwait */
6022
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6023
                        s->cpl != 0)
6024
                        goto illegal_op;
6025
                    if (s->cc_op != CC_OP_DYNAMIC) {
6026
                        gen_op_set_cc_op(s->cc_op);
6027
                        s->cc_op = CC_OP_DYNAMIC;
6028
                    }
6029
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
6030
                        break;
6031
                    gen_jmp_im(s->pc - s->cs_base);
6032
                    tcg_gen_helper_0_0(helper_mwait);
6033
                    gen_eob(s);
6034
                    break;
6035
                default:
6036
                    goto illegal_op;
6037
                }
6038
            } else { /* sidt */
6039
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
6040
                    break;
6041
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6042
                gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
6043
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
6044
                gen_add_A0_im(s, 2);
6045
                gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
6046
                if (!s->dflag)
6047
                    gen_op_andl_T0_im(0xffffff);
6048
                gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6049
            }
6050
            break;
6051
        case 2: /* lgdt */
6052
        case 3: /* lidt */
6053
            if (mod == 3) {
6054
                switch(rm) {
6055
                case 0: /* VMRUN */
6056
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
6057
                        break;
6058
                    if (s->cc_op != CC_OP_DYNAMIC)
6059
                        gen_op_set_cc_op(s->cc_op);
6060
                    gen_jmp_im(s->pc - s->cs_base);
6061
                    tcg_gen_helper_0_0(helper_vmrun);
6062
                    s->cc_op = CC_OP_EFLAGS;
6063
                    gen_eob(s);
6064
                    break;
6065
                case 1: /* VMMCALL */
6066
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
6067
                         break;
6068
                    /* FIXME: cause #UD if hflags & SVM */
6069
                    tcg_gen_helper_0_0(helper_vmmcall);
6070
                    break;
6071
                case 2: /* VMLOAD */
6072
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
6073
                         break;
6074
                    tcg_gen_helper_0_0(helper_vmload);
6075
                    break;
6076
                case 3: /* VMSAVE */
6077
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
6078
                         break;
6079
                    tcg_gen_helper_0_0(helper_vmsave);
6080
                    break;
6081
                case 4: /* STGI */
6082
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
6083
                         break;
6084
                    tcg_gen_helper_0_0(helper_stgi);
6085
                    break;
6086
                case 5: /* CLGI */
6087
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
6088
                         break;
6089
                    tcg_gen_helper_0_0(helper_clgi);
6090
                    break;
6091
                case 6: /* SKINIT */
6092
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
6093
                         break;
6094
                    tcg_gen_helper_0_0(helper_skinit);
6095
                    break;
6096
                case 7: /* INVLPGA */
6097
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
6098
                         break;
6099
                    tcg_gen_helper_0_0(helper_invlpga);
6100
                    break;
6101
                default:
6102
                    goto illegal_op;
6103
                }
6104
            } else if (s->cpl != 0) {
6105
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6106
            } else {
6107
                if (gen_svm_check_intercept(s, pc_start,
6108
                                            op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
6109
                    break;
6110
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6111
                gen_op_ld_T1_A0(OT_WORD + s->mem_index);
6112
                gen_add_A0_im(s, 2);
6113
                gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6114
                if (!s->dflag)
6115
                    gen_op_andl_T0_im(0xffffff);
6116
                if (op == 2) {
6117
                    gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
6118
                    gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
6119
                } else {
6120
                    gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
6121
                    gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
6122
                }
6123
            }
6124
            break;
6125
        case 4: /* smsw */
6126
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
6127
                break;
6128
            gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
6129
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6130
            break;
6131
        case 6: /* lmsw */
6132
            if (s->cpl != 0) {
6133
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6134
            } else {
6135
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
6136
                    break;
6137
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6138
                tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
6139
                gen_jmp_im(s->pc - s->cs_base);
6140
                gen_eob(s);
6141
            }
6142
            break;
6143
        case 7: /* invlpg */
6144
            if (s->cpl != 0) {
6145
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6146
            } else {
6147
                if (mod == 3) {
6148
#ifdef TARGET_X86_64
6149
                    if (CODE64(s) && rm == 0) {
6150
                        /* swapgs */
6151
                        gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
6152
                        gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
6153
                        gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
6154
                        gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
6155
                    } else
6156
#endif
6157
                    {
6158
                        goto illegal_op;
6159
                    }
6160
                } else {
6161
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
6162
                        break;
6163
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6164
                    tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
6165
                    gen_jmp_im(s->pc - s->cs_base);
6166
                    gen_eob(s);
6167
                }
6168
            }
6169
            break;
6170
        default:
6171
            goto illegal_op;
6172
        }
6173
        break;
6174
    case 0x108: /* invd */
6175
    case 0x109: /* wbinvd */
6176
        if (s->cpl != 0) {
6177
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6178
        } else {
6179
            if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6180
                break;
6181
            /* nothing to do */
6182
        }
6183
        break;
6184
    case 0x63: /* arpl or movslS (x86_64) */
6185
#ifdef TARGET_X86_64
6186
        if (CODE64(s)) {
6187
            int d_ot;
6188
            /* d_ot is the size of destination */
6189
            d_ot = dflag + OT_WORD;
6190

    
6191
            modrm = ldub_code(s->pc++);
6192
            reg = ((modrm >> 3) & 7) | rex_r;
6193
            mod = (modrm >> 6) & 3;
6194
            rm = (modrm & 7) | REX_B(s);
6195

    
6196
            if (mod == 3) {
6197
                gen_op_mov_TN_reg(OT_LONG, 0, rm);
6198
                /* sign extend */
6199
                if (d_ot == OT_QUAD)
6200
                    gen_op_movslq_T0_T0();
6201
                gen_op_mov_reg_T0(d_ot, reg);
6202
            } else {
6203
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6204
                if (d_ot == OT_QUAD) {
6205
                    gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6206
                } else {
6207
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6208
                }
6209
                gen_op_mov_reg_T0(d_ot, reg);
6210
            }
6211
        } else
6212
#endif
6213
        {
6214
            if (!s->pe || s->vm86)
6215
                goto illegal_op;
6216
            ot = dflag ? OT_LONG : OT_WORD;
6217
            modrm = ldub_code(s->pc++);
6218
            reg = (modrm >> 3) & 7;
6219
            mod = (modrm >> 6) & 3;
6220
            rm = modrm & 7;
6221
            if (mod != 3) {
6222
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6223
                gen_op_ld_T0_A0(ot + s->mem_index);
6224
            } else {
6225
                gen_op_mov_TN_reg(ot, 0, rm);
6226
            }
6227
            gen_op_mov_TN_reg(ot, 1, reg);
6228
            if (s->cc_op != CC_OP_DYNAMIC)
6229
                gen_op_set_cc_op(s->cc_op);
6230
            gen_op_arpl();
6231
            s->cc_op = CC_OP_EFLAGS;
6232
            if (mod != 3) {
6233
                gen_op_st_T0_A0(ot + s->mem_index);
6234
            } else {
6235
                gen_op_mov_reg_T0(ot, rm);
6236
            }
6237
            gen_op_arpl_update();
6238
        }
6239
        break;
6240
    case 0x102: /* lar */
6241
    case 0x103: /* lsl */
6242
        if (!s->pe || s->vm86)
6243
            goto illegal_op;
6244
        ot = dflag ? OT_LONG : OT_WORD;
6245
        modrm = ldub_code(s->pc++);
6246
        reg = ((modrm >> 3) & 7) | rex_r;
6247
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6248
        gen_op_mov_TN_reg(ot, 1, reg);
6249
        if (s->cc_op != CC_OP_DYNAMIC)
6250
            gen_op_set_cc_op(s->cc_op);
6251
        if (b == 0x102)
6252
            gen_op_lar();
6253
        else
6254
            gen_op_lsl();
6255
        s->cc_op = CC_OP_EFLAGS;
6256
        gen_op_mov_reg_T1(ot, reg);
6257
        break;
6258
    case 0x118:
6259
        modrm = ldub_code(s->pc++);
6260
        mod = (modrm >> 6) & 3;
6261
        op = (modrm >> 3) & 7;
6262
        switch(op) {
6263
        case 0: /* prefetchnta */
6264
        case 1: /* prefetchnt0 */
6265
        case 2: /* prefetchnt0 */
6266
        case 3: /* prefetchnt0 */
6267
            if (mod == 3)
6268
                goto illegal_op;
6269
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6270
            /* nothing more to do */
6271
            break;
6272
        default: /* nop (multi byte) */
6273
            gen_nop_modrm(s, modrm);
6274
            break;
6275
        }
6276
        break;
6277
    case 0x119 ... 0x11f: /* nop (multi byte) */
6278
        modrm = ldub_code(s->pc++);
6279
        gen_nop_modrm(s, modrm);
6280
        break;
6281
    case 0x120: /* mov reg, crN */
6282
    case 0x122: /* mov crN, reg */
6283
        if (s->cpl != 0) {
6284
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6285
        } else {
6286
            modrm = ldub_code(s->pc++);
6287
            if ((modrm & 0xc0) != 0xc0)
6288
                goto illegal_op;
6289
            rm = (modrm & 7) | REX_B(s);
6290
            reg = ((modrm >> 3) & 7) | rex_r;
6291
            if (CODE64(s))
6292
                ot = OT_QUAD;
6293
            else
6294
                ot = OT_LONG;
6295
            switch(reg) {
6296
            case 0:
6297
            case 2:
6298
            case 3:
6299
            case 4:
6300
            case 8:
6301
                if (b & 2) {
6302
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6303
                    gen_op_mov_TN_reg(ot, 0, rm);
6304
                    tcg_gen_helper_0_2(helper_movl_crN_T0, 
6305
                                       tcg_const_i32(reg), cpu_T[0]);
6306
                    gen_jmp_im(s->pc - s->cs_base);
6307
                    gen_eob(s);
6308
                } else {
6309
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6310
#if !defined(CONFIG_USER_ONLY)
6311
                    if (reg == 8)
6312
                        tcg_gen_helper_1_0(helper_movtl_T0_cr8, cpu_T[0]);
6313
                    else
6314
#endif
6315
                        gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6316
                    gen_op_mov_reg_T0(ot, rm);
6317
                }
6318
                break;
6319
            default:
6320
                goto illegal_op;
6321
            }
6322
        }
6323
        break;
6324
    case 0x121: /* mov reg, drN */
6325
    case 0x123: /* mov drN, reg */
6326
        if (s->cpl != 0) {
6327
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6328
        } else {
6329
            modrm = ldub_code(s->pc++);
6330
            if ((modrm & 0xc0) != 0xc0)
6331
                goto illegal_op;
6332
            rm = (modrm & 7) | REX_B(s);
6333
            reg = ((modrm >> 3) & 7) | rex_r;
6334
            if (CODE64(s))
6335
                ot = OT_QUAD;
6336
            else
6337
                ot = OT_LONG;
6338
            /* XXX: do it dynamically with CR4.DE bit */
6339
            if (reg == 4 || reg == 5 || reg >= 8)
6340
                goto illegal_op;
6341
            if (b & 2) {
6342
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6343
                gen_op_mov_TN_reg(ot, 0, rm);
6344
                tcg_gen_helper_0_2(helper_movl_drN_T0,
6345
                                   tcg_const_i32(reg), cpu_T[0]);
6346
                gen_jmp_im(s->pc - s->cs_base);
6347
                gen_eob(s);
6348
            } else {
6349
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6350
                gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6351
                gen_op_mov_reg_T0(ot, rm);
6352
            }
6353
        }
6354
        break;
6355
    case 0x106: /* clts */
6356
        if (s->cpl != 0) {
6357
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6358
        } else {
6359
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6360
            tcg_gen_helper_0_0(helper_clts);
6361
            /* abort block because static cpu state changed */
6362
            gen_jmp_im(s->pc - s->cs_base);
6363
            gen_eob(s);
6364
        }
6365
        break;
6366
    /* MMX/3DNow!/SSE/SSE2/SSE3 support */
6367
    case 0x1c3: /* MOVNTI reg, mem */
6368
        if (!(s->cpuid_features & CPUID_SSE2))
6369
            goto illegal_op;
6370
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6371
        modrm = ldub_code(s->pc++);
6372
        mod = (modrm >> 6) & 3;
6373
        if (mod == 3)
6374
            goto illegal_op;
6375
        reg = ((modrm >> 3) & 7) | rex_r;
6376
        /* generate a generic store */
6377
        gen_ldst_modrm(s, modrm, ot, reg, 1);
6378
        break;
6379
    case 0x1ae:
6380
        modrm = ldub_code(s->pc++);
6381
        mod = (modrm >> 6) & 3;
6382
        op = (modrm >> 3) & 7;
6383
        switch(op) {
6384
        case 0: /* fxsave */
6385
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6386
                (s->flags & HF_EM_MASK))
6387
                goto illegal_op;
6388
            if (s->flags & HF_TS_MASK) {
6389
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6390
                break;
6391
            }
6392
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6393
            if (s->cc_op != CC_OP_DYNAMIC)
6394
                gen_op_set_cc_op(s->cc_op);
6395
            gen_jmp_im(pc_start - s->cs_base);
6396
            tcg_gen_helper_0_2(helper_fxsave, 
6397
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
6398
            break;
6399
        case 1: /* fxrstor */
6400
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6401
                (s->flags & HF_EM_MASK))
6402
                goto illegal_op;
6403
            if (s->flags & HF_TS_MASK) {
6404
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6405
                break;
6406
            }
6407
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6408
            if (s->cc_op != CC_OP_DYNAMIC)
6409
                gen_op_set_cc_op(s->cc_op);
6410
            gen_jmp_im(pc_start - s->cs_base);
6411
            tcg_gen_helper_0_2(helper_fxrstor,
6412
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
6413
            break;
6414
        case 2: /* ldmxcsr */
6415
        case 3: /* stmxcsr */
6416
            if (s->flags & HF_TS_MASK) {
6417
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6418
                break;
6419
            }
6420
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6421
                mod == 3)
6422
                goto illegal_op;
6423
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6424
            if (op == 2) {
6425
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6426
                gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6427
            } else {
6428
                gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6429
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
6430
            }
6431
            break;
6432
        case 5: /* lfence */
6433
        case 6: /* mfence */
6434
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6435
                goto illegal_op;
6436
            break;
6437
        case 7: /* sfence / clflush */
6438
            if ((modrm & 0xc7) == 0xc0) {
6439
                /* sfence */
6440
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
6441
                if (!(s->cpuid_features & CPUID_SSE))
6442
                    goto illegal_op;
6443
            } else {
6444
                /* clflush */
6445
                if (!(s->cpuid_features & CPUID_CLFLUSH))
6446
                    goto illegal_op;
6447
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6448
            }
6449
            break;
6450
        default:
6451
            goto illegal_op;
6452
        }
6453
        break;
6454
    case 0x10d: /* 3DNow! prefetch(w) */
6455
        modrm = ldub_code(s->pc++);
6456
        mod = (modrm >> 6) & 3;
6457
        if (mod == 3)
6458
            goto illegal_op;
6459
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6460
        /* ignore for now */
6461
        break;
6462
    case 0x1aa: /* rsm */
6463
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
6464
            break;
6465
        if (!(s->flags & HF_SMM_MASK))
6466
            goto illegal_op;
6467
        if (s->cc_op != CC_OP_DYNAMIC) {
6468
            gen_op_set_cc_op(s->cc_op);
6469
            s->cc_op = CC_OP_DYNAMIC;
6470
        }
6471
        gen_jmp_im(s->pc - s->cs_base);
6472
        tcg_gen_helper_0_0(helper_rsm);
6473
        gen_eob(s);
6474
        break;
6475
    case 0x10e ... 0x10f:
6476
        /* 3DNow! instructions, ignore prefixes */
6477
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
6478
    case 0x110 ... 0x117:
6479
    case 0x128 ... 0x12f:
6480
    case 0x150 ... 0x177:
6481
    case 0x17c ... 0x17f:
6482
    case 0x1c2:
6483
    case 0x1c4 ... 0x1c6:
6484
    case 0x1d0 ... 0x1fe:
6485
        gen_sse(s, b, pc_start, rex_r);
6486
        break;
6487
    default:
6488
        goto illegal_op;
6489
    }
6490
    /* lock generation */
6491
    if (s->prefix & PREFIX_LOCK)
6492
        tcg_gen_helper_0_0(helper_unlock);
6493
    return s->pc;
6494
 illegal_op:
6495
    if (s->prefix & PREFIX_LOCK)
6496
        tcg_gen_helper_0_0(helper_unlock);
6497
    /* XXX: ensure that no lock was generated */
6498
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6499
    return s->pc;
6500
}
6501

    
6502
#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
6503
#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
6504

    
6505
/* flags read by an operation.
   Indexed by micro-op (INDEX_op_*); each entry is the set of EFLAGS
   status bits (CC_O/CC_S/CC_Z/CC_A/CC_P/CC_C) the op consumes.  Used by
   optimize_flags() in its backward liveness pass: an op's read set is
   added to the live-flag mask for the instructions that precede it.
   Ops not listed read no flags (implicitly 0). */
static uint16_t opc_read_flags[NB_OPS] = {
    [INDEX_op_aas] = CC_A,
    [INDEX_op_aaa] = CC_A,
    [INDEX_op_das] = CC_A | CC_C,
    [INDEX_op_daa] = CC_A | CC_C,

    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_C,

    [INDEX_op_into] = CC_O,

    /* conditional jumps on the result of a subtract (b/w/l sizes) */
    [INDEX_op_jb_subb] = CC_C,
    [INDEX_op_jb_subw] = CC_C,
    [INDEX_op_jb_subl] = CC_C,

    [INDEX_op_jz_subb] = CC_Z,
    [INDEX_op_jz_subw] = CC_Z,
    [INDEX_op_jz_subl] = CC_Z,

    [INDEX_op_jbe_subb] = CC_Z | CC_C,
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
    [INDEX_op_jbe_subl] = CC_Z | CC_C,

    [INDEX_op_js_subb] = CC_S,
    [INDEX_op_js_subw] = CC_S,
    [INDEX_op_js_subl] = CC_S,

    [INDEX_op_jl_subb] = CC_O | CC_S,
    [INDEX_op_jl_subw] = CC_O | CC_S,
    [INDEX_op_jl_subl] = CC_O | CC_S,

    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzw] = CC_Z,
    [INDEX_op_loopnzl] = CC_Z,
    [INDEX_op_loopzw] = CC_Z,
    [INDEX_op_loopzl] = CC_Z,

    /* setcc variants reading the dynamically-computed flags */
    [INDEX_op_seto_T0_cc] = CC_O,
    [INDEX_op_setb_T0_cc] = CC_C,
    [INDEX_op_setz_T0_cc] = CC_Z,
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
    [INDEX_op_sets_T0_cc] = CC_S,
    [INDEX_op_setp_T0_cc] = CC_P,
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

    /* setcc variants specialised on a preceding subtract */
    [INDEX_op_setb_T0_subb] = CC_C,
    [INDEX_op_setb_T0_subw] = CC_C,
    [INDEX_op_setb_T0_subl] = CC_C,

    [INDEX_op_setz_T0_subb] = CC_Z,
    [INDEX_op_setz_T0_subw] = CC_Z,
    [INDEX_op_setz_T0_subl] = CC_Z,

    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

    [INDEX_op_sets_T0_subb] = CC_S,
    [INDEX_op_sets_T0_subw] = CC_S,
    [INDEX_op_sets_T0_subl] = CC_S,

    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,

    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
    [INDEX_op_cmc] = CC_C,
    [INDEX_op_salc] = CC_C,

    /* needed for correct flag optimisation before string ops */
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
    [INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
    /* 64-bit (quad) counterparts of the entries above */
    [INDEX_op_jb_subq] = CC_C,
    [INDEX_op_jz_subq] = CC_Z,
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
    [INDEX_op_js_subq] = CC_S,
    [INDEX_op_jl_subq] = CC_O | CC_S,
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzq] = CC_Z,
    [INDEX_op_loopzq] = CC_Z,

    [INDEX_op_setb_T0_subq] = CC_C,
    [INDEX_op_setz_T0_subq] = CC_Z,
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
    [INDEX_op_sets_T0_subq] = CC_S,
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
#endif

/* adc/sbb and rotate-through-carry consume the incoming carry.  The
   macro is instantiated once per memory-access suffix so every ld/st
   variant of these ops gets the same read set. */
#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)

    DEF_READF( )
    DEF_READF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_READF(_kernel)
    DEF_READF(_user)
#endif
};
6637

    
6638
/* flags written by an operation */
6639
static uint16_t opc_write_flags[NB_OPS] = {
6640
    [INDEX_op_update2_cc] = CC_OSZAPC,
6641
    [INDEX_op_update1_cc] = CC_OSZAPC,
6642
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
6643
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
6644
    /* subtle: due to the incl/decl implementation, C is used */
6645
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
6646
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,
6647

    
6648
    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
6649
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
6650
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
6651
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
6652
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
6653
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
6654
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
6655
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
6656
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
6657
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
6658
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)
6659

    
6660
    /* sse */
6661
    [INDEX_op_com_dummy] = CC_OSZAPC,
6662
    [INDEX_op_com_dummy] = CC_OSZAPC,
6663
    [INDEX_op_com_dummy] = CC_OSZAPC,
6664
    [INDEX_op_com_dummy] = CC_OSZAPC,
6665

    
6666
    /* bcd */
6667
    [INDEX_op_aam] = CC_OSZAPC,
6668
    [INDEX_op_aad] = CC_OSZAPC,
6669
    [INDEX_op_aas] = CC_OSZAPC,
6670
    [INDEX_op_aaa] = CC_OSZAPC,
6671
    [INDEX_op_das] = CC_OSZAPC,
6672
    [INDEX_op_daa] = CC_OSZAPC,
6673

    
6674
    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
6675
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
6676
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
6677
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
6678
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
6679
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
6680
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
6681
    [INDEX_op_clc] = CC_C,
6682
    [INDEX_op_stc] = CC_C,
6683
    [INDEX_op_cmc] = CC_C,
6684

    
6685
    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
6686
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
6687
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
6688
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
6689
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
6690
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
6691
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
6692
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
6693
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
6694
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
6695
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
6696
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)
6697

    
6698
    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
6699
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
6700
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
6701
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
6702
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
6703
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)
6704

    
6705
    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
6706
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
6707
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
6708
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)
6709

    
6710
    [INDEX_op_cmpxchg8b] = CC_Z,
6711
    [INDEX_op_lar] = CC_Z,
6712
    [INDEX_op_lsl] = CC_Z,
6713
    [INDEX_op_verr] = CC_Z,
6714
    [INDEX_op_verw] = CC_Z,
6715
    [INDEX_op_fcomi_dummy] = CC_Z | CC_P | CC_C,
6716
    [INDEX_op_fcomi_dummy] = CC_Z | CC_P | CC_C,
6717

    
6718
#define DEF_WRITEF(SUFFIX)\
6719
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6720
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6721
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6722
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6723
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6724
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6725
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6726
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6727
\
6728
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6729
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6730
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6731
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6732
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6733
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6734
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6735
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6736
\
6737
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6738
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6739
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6740
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6741
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6742
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6743
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6744
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6745
\
6746
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6747
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6748
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6749
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6750
\
6751
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6752
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6753
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6754
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6755
\
6756
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6757
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6758
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6759
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6760
\
6761
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6762
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6763
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6764
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6765
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6766
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6767
\
6768
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6769
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6770
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6771
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6772
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6773
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6774
\
6775
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6776
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6777
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6778
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)
6779

    
6780

    
6781
    DEF_WRITEF( )
6782
    DEF_WRITEF(_raw)
6783
#ifndef CONFIG_USER_ONLY
6784
    DEF_WRITEF(_kernel)
6785
    DEF_WRITEF(_user)
6786
#endif
6787
};
6788

    
6789
/* simpler form of an operation if no flags need to be generated.
   Indexed by micro-op; maps a flag-computing op to an equivalent op
   that skips the flag computation.  optimize_flags() substitutes this
   form when the op's written flags are all dead.  Entries left 0 are
   patched by optimize_flags_init() to map each op to itself. */
static uint16_t opc_simpler[NB_OPS] = {
    /* pure flag-update ops become nops when their flags are dead */
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
#if 0
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
#endif

    /* shifts: the _cc variants drop to plain shift ops */
    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)

/* rotates exist per memory-access suffix; instantiated for each below */
#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};
6832

    
6833
static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
6834
{
6835
    switch(macro_id) {
6836
#ifdef MACRO_TEST
6837
    case MACRO_TEST:
6838
        tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
6839
        break;
6840
#endif
6841
    }
6842
}
6843

    
6844
void optimize_flags_init(void)
6845
{
6846
    int i;
6847
    /* put default values in arrays */
6848
    for(i = 0; i < NB_OPS; i++) {
6849
        if (opc_simpler[i] == 0)
6850
            opc_simpler[i] = i;
6851
    }
6852

    
6853
    tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
6854

    
6855
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
6856
#if TARGET_LONG_BITS > HOST_LONG_BITS
6857
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL, 
6858
                                  TCG_AREG0, offsetof(CPUState, t0), "T0");
6859
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
6860
                                  TCG_AREG0, offsetof(CPUState, t1), "T1");
6861
    cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
6862
                                TCG_AREG0, offsetof(CPUState, t2), "A0");
6863
#else
6864
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
6865
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
6866
    cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
6867
#endif
6868
#if defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS)
6869
    /* XXX: must be suppressed once there are less fixed registers */
6870
    cpu_tmp1 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");
6871
#endif
6872
}
6873

    
6874
/* CPU flags computation optimization: we move backward thru the
6875
   generated code to see which flags are needed. The operation is
6876
   modified if suitable */
6877
static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6878
{
6879
    uint16_t *opc_ptr;
6880
    int live_flags, write_flags, op;
6881

    
6882
    opc_ptr = opc_buf + opc_buf_len;
6883
    /* live_flags contains the flags needed by the next instructions
6884
       in the code. At the end of the block, we consider that all the
6885
       flags are live. */
6886
    live_flags = CC_OSZAPC;
6887
    while (opc_ptr > opc_buf) {
6888
        op = *--opc_ptr;
6889
        /* if none of the flags written by the instruction is used,
6890
           then we can try to find a simpler instruction */
6891
        write_flags = opc_write_flags[op];
6892
        if ((live_flags & write_flags) == 0) {
6893
            *opc_ptr = opc_simpler[op];
6894
        }
6895
        /* compute the live flags before the instruction */
6896
        live_flags &= ~write_flags;
6897
        live_flags |= opc_read_flags[op];
6898
    }
6899
}
6900

    
6901
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
6902
   basic block 'tb'. If search_pc is TRUE, also generate PC
6903
   information for each intermediate instruction. */
6904
static inline int gen_intermediate_code_internal(CPUState *env,
6905
                                                 TranslationBlock *tb,
6906
                                                 int search_pc)
6907
{
6908
    DisasContext dc1, *dc = &dc1;
6909
    target_ulong pc_ptr;
6910
    uint16_t *gen_opc_end;
6911
    int j, lj, cflags;
6912
    uint64_t flags;
6913
    target_ulong pc_start;
6914
    target_ulong cs_base;
6915

    
6916
    /* generate intermediate code */
6917
    pc_start = tb->pc;
6918
    cs_base = tb->cs_base;
6919
    flags = tb->flags;
6920
    cflags = tb->cflags;
6921

    
6922
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
6923
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
6924
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
6925
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
6926
    dc->f_st = 0;
6927
    dc->vm86 = (flags >> VM_SHIFT) & 1;
6928
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
6929
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
6930
    dc->tf = (flags >> TF_SHIFT) & 1;
6931
    dc->singlestep_enabled = env->singlestep_enabled;
6932
    dc->cc_op = CC_OP_DYNAMIC;
6933
    dc->cs_base = cs_base;
6934
    dc->tb = tb;
6935
    dc->popl_esp_hack = 0;
6936
    /* select memory access functions */
6937
    dc->mem_index = 0;
6938
    if (flags & HF_SOFTMMU_MASK) {
6939
        if (dc->cpl == 3)
6940
            dc->mem_index = 2 * 4;
6941
        else
6942
            dc->mem_index = 1 * 4;
6943
    }
6944
    dc->cpuid_features = env->cpuid_features;
6945
    dc->cpuid_ext_features = env->cpuid_ext_features;
6946
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
6947
#ifdef TARGET_X86_64
6948
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
6949
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
6950
#endif
6951
    dc->flags = flags;
6952
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
6953
                    (flags & HF_INHIBIT_IRQ_MASK)
6954
#ifndef CONFIG_SOFTMMU
6955
                    || (flags & HF_SOFTMMU_MASK)
6956
#endif
6957
                    );
6958
#if 0
6959
    /* check addseg logic */
6960
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
6961
        printf("ERROR addseg\n");
6962
#endif
6963

    
6964
    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
6965
#if !(defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS))
6966
    cpu_tmp1 = tcg_temp_new(TCG_TYPE_I64);
6967
#endif
6968
    cpu_tmp2 = tcg_temp_new(TCG_TYPE_I32);
6969
    cpu_tmp3 = tcg_temp_new(TCG_TYPE_I32);
6970
    cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
6971
    cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
6972

    
6973
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
6974

    
6975
    dc->is_jmp = DISAS_NEXT;
6976
    pc_ptr = pc_start;
6977
    lj = -1;
6978

    
6979
    for(;;) {
6980
        if (env->nb_breakpoints > 0) {
6981
            for(j = 0; j < env->nb_breakpoints; j++) {
6982
                if (env->breakpoints[j] == pc_ptr) {
6983
                    gen_debug(dc, pc_ptr - dc->cs_base);
6984
                    break;
6985
                }
6986
            }
6987
        }
6988
        if (search_pc) {
6989
            j = gen_opc_ptr - gen_opc_buf;
6990
            if (lj < j) {
6991
                lj++;
6992
                while (lj < j)
6993
                    gen_opc_instr_start[lj++] = 0;
6994
            }
6995
            gen_opc_pc[lj] = pc_ptr;
6996
            gen_opc_cc_op[lj] = dc->cc_op;
6997
            gen_opc_instr_start[lj] = 1;
6998
        }
6999
        pc_ptr = disas_insn(dc, pc_ptr);
7000
        /* stop translation if indicated */
7001
        if (dc->is_jmp)
7002
            break;
7003
        /* if single step mode, we generate only one instruction and
7004
           generate an exception */
7005
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
7006
           the flag and abort the translation to give the irqs a
7007
           change to be happen */
7008
        if (dc->tf || dc->singlestep_enabled ||
7009
            (flags & HF_INHIBIT_IRQ_MASK) ||
7010
            (cflags & CF_SINGLE_INSN)) {
7011
            gen_jmp_im(pc_ptr - dc->cs_base);
7012
            gen_eob(dc);
7013
            break;
7014
        }
7015
        /* if too long translation, stop generation too */
7016
        if (gen_opc_ptr >= gen_opc_end ||
7017
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
7018
            gen_jmp_im(pc_ptr - dc->cs_base);
7019
            gen_eob(dc);
7020
            break;
7021
        }
7022
    }
7023
    *gen_opc_ptr = INDEX_op_end;
7024
    /* we don't forget to fill the last values */
7025
    if (search_pc) {
7026
        j = gen_opc_ptr - gen_opc_buf;
7027
        lj++;
7028
        while (lj <= j)
7029
            gen_opc_instr_start[lj++] = 0;
7030
    }
7031

    
7032
#ifdef DEBUG_DISAS
7033
    if (loglevel & CPU_LOG_TB_CPU) {
7034
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
7035
    }
7036
    if (loglevel & CPU_LOG_TB_IN_ASM) {
7037
        int disas_flags;
7038
        fprintf(logfile, "----------------\n");
7039
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
7040
#ifdef TARGET_X86_64
7041
        if (dc->code64)
7042
            disas_flags = 2;
7043
        else
7044
#endif
7045
            disas_flags = !dc->code32;
7046
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
7047
        fprintf(logfile, "\n");
7048
        if (loglevel & CPU_LOG_TB_OP_OPT) {
7049
            fprintf(logfile, "OP before opt:\n");
7050
            tcg_dump_ops(&tcg_ctx, logfile);
7051
            fprintf(logfile, "\n");
7052
        }
7053
    }
7054
#endif
7055

    
7056
    /* optimize flag computations */
7057
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);
7058

    
7059
    if (!search_pc)
7060
        tb->size = pc_ptr - pc_start;
7061
    return 0;
7062
}
7063

    
7064
/* Translate the basic block 'tb' without recording per-instruction
   PC information. Returns 0 on success. */
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 0);
}
7068

    
7069
/* Translate the basic block 'tb' and record per-instruction PC
   information (used for precise exception recovery). Returns 0. */
int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 1);
}
7073

    
7074
/* Restore the guest CPU state (EIP and, if statically known, cc_op)
   corresponding to the micro-op at index 'pc_pos' inside translation
   block 'tb'. Called after a fault at host address 'searched_pc'.
   'puc' (host signal context) is unused on this target. */
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    int cc_op;
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP) {
        int i;
        fprintf(logfile, "RESTORE:\n");
        for(i = 0;i <= pc_pos; i++) {
            if (gen_opc_instr_start[i]) {
                fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
            }
        }
        fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                (uint32_t)tb->cs_base);
    }
#endif
    /* EIP is the linear PC minus the code segment base */
    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
    cc_op = gen_opc_cc_op[pc_pos];
    /* only overwrite cc_op when the translator knew it statically;
       CC_OP_DYNAMIC means the runtime value is already correct */
    if (cc_op != CC_OP_DYNAMIC)
        env->cc_op = cc_op;
}