Statistics
| Branch: | Revision:

root / target-i386 / translate.c @ ad848875

History | View | Annotate | Download (206 kB)

1
/*
2
 *  i386 translation
3
 *
4
 *  Copyright (c) 2003 Fabrice Bellard
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, write to the Free Software
18
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
 */
20
#include <stdarg.h>
21
#include <stdlib.h>
22
#include <stdio.h>
23
#include <string.h>
24
#include <inttypes.h>
25
#include <signal.h>
26
#include <assert.h>
27

    
28
#include "cpu.h"
29
#include "exec-all.h"
30
#include "disas.h"
31

    
32
/* XXX: move that elsewhere */
33
static uint16_t *gen_opc_ptr;
34
static uint32_t *gen_opparam_ptr;
35

    
36
#define PREFIX_REPZ   0x01
37
#define PREFIX_REPNZ  0x02
38
#define PREFIX_LOCK   0x04
39
#define PREFIX_DATA   0x08
40
#define PREFIX_ADR    0x10
41

    
42
#ifdef TARGET_X86_64
43
#define X86_64_ONLY(x) x
44
#define X86_64_DEF(x...) x
45
#define CODE64(s) ((s)->code64)
46
#define REX_X(s) ((s)->rex_x)
47
#define REX_B(s) ((s)->rex_b)
48
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
49
#if 1
50
#define BUGGY_64(x) NULL
51
#endif
52
#else
53
#define X86_64_ONLY(x) NULL
54
#define X86_64_DEF(x...)
55
#define CODE64(s) 0
56
#define REX_X(s) 0
57
#define REX_B(s) 0
58
#endif
59

    
60
#ifdef TARGET_X86_64
61
static int x86_64_hregs;
62
#endif
63

    
64
#ifdef USE_DIRECT_JUMP
65
#define TBPARAM(x)
66
#else
67
#define TBPARAM(x) (long)(x)
68
#endif
69

    
70
typedef struct DisasContext {
71
    /* current insn context */
72
    int override; /* -1 if no override */
73
    int prefix;
74
    int aflag, dflag;
75
    target_ulong pc; /* pc = eip + cs_base */
76
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
77
                   static state change (stop translation) */
78
    /* current block context */
79
    target_ulong cs_base; /* base of CS segment */
80
    int pe;     /* protected mode */
81
    int code32; /* 32 bit code segment */
82
#ifdef TARGET_X86_64
83
    int lma;    /* long mode active */
84
    int code64; /* 64 bit code segment */
85
    int rex_x, rex_b;
86
#endif
87
    int ss32;   /* 32 bit stack segment */
88
    int cc_op;  /* current CC operation */
89
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
90
    int f_st;   /* currently unused */
91
    int vm86;   /* vm86 mode */
92
    int cpl;
93
    int iopl;
94
    int tf;     /* TF cpu flag */
95
    int singlestep_enabled; /* "hardware" single step enabled */
96
    int jmp_opt; /* use direct block chaining for direct jumps */
97
    int mem_index; /* select memory access functions */
98
    uint64_t flags; /* all execution flags */
99
    struct TranslationBlock *tb;
100
    int popl_esp_hack; /* for correct popl with esp base handling */
101
    int rip_offset; /* only used in x86_64, but left for simplicity */
102
    int cpuid_features;
103
    int cpuid_ext_features;
104
} DisasContext;
105

    
106
static void gen_eob(DisasContext *s);
107
static void gen_jmp(DisasContext *s, target_ulong eip);
108
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
109

    
110
/* i386 arith/logic operations */
111
enum {
112
    OP_ADDL,
113
    OP_ORL,
114
    OP_ADCL,
115
    OP_SBBL,
116
    OP_ANDL,
117
    OP_SUBL,
118
    OP_XORL,
119
    OP_CMPL,
120
};
121

    
122
/* i386 shift ops */
123
enum {
124
    OP_ROL,
125
    OP_ROR,
126
    OP_RCL,
127
    OP_RCR,
128
    OP_SHL,
129
    OP_SHR,
130
    OP_SHL1, /* undocumented */
131
    OP_SAR = 7,
132
};
133

    
134
enum {
135
#define DEF(s, n, copy_size) INDEX_op_ ## s,
136
#include "opc.h"
137
#undef DEF
138
    NB_OPS,
139
};
140

    
141
#include "gen-op.h"
142

    
143
/* operand size */
144
enum {
145
    OT_BYTE = 0,
146
    OT_WORD,
147
    OT_LONG,
148
    OT_QUAD,
149
};
150

    
151
enum {
152
    /* I386 int registers */
153
    OR_EAX,   /* MUST be even numbered */
154
    OR_ECX,
155
    OR_EDX,
156
    OR_EBX,
157
    OR_ESP,
158
    OR_EBP,
159
    OR_ESI,
160
    OR_EDI,
161

    
162
    OR_TMP0 = 16,    /* temporary operand register */
163
    OR_TMP1,
164
    OR_A0, /* temporary register used when doing address evaluation */
165
};
166

    
167
#ifdef TARGET_X86_64
168

    
169
#define NB_OP_SIZES 4
170

    
171
#define DEF_REGS(prefix, suffix) \
172
  prefix ## EAX ## suffix,\
173
  prefix ## ECX ## suffix,\
174
  prefix ## EDX ## suffix,\
175
  prefix ## EBX ## suffix,\
176
  prefix ## ESP ## suffix,\
177
  prefix ## EBP ## suffix,\
178
  prefix ## ESI ## suffix,\
179
  prefix ## EDI ## suffix,\
180
  prefix ## R8 ## suffix,\
181
  prefix ## R9 ## suffix,\
182
  prefix ## R10 ## suffix,\
183
  prefix ## R11 ## suffix,\
184
  prefix ## R12 ## suffix,\
185
  prefix ## R13 ## suffix,\
186
  prefix ## R14 ## suffix,\
187
  prefix ## R15 ## suffix,
188

    
189
#define DEF_BREGS(prefixb, prefixh, suffix)             \
190
                                                        \
191
static void prefixb ## ESP ## suffix ## _wrapper(void)  \
192
{                                                       \
193
    if (x86_64_hregs)                                 \
194
        prefixb ## ESP ## suffix ();                    \
195
    else                                                \
196
        prefixh ## EAX ## suffix ();                    \
197
}                                                       \
198
                                                        \
199
static void prefixb ## EBP ## suffix ## _wrapper(void)  \
200
{                                                       \
201
    if (x86_64_hregs)                                 \
202
        prefixb ## EBP ## suffix ();                    \
203
    else                                                \
204
        prefixh ## ECX ## suffix ();                    \
205
}                                                       \
206
                                                        \
207
static void prefixb ## ESI ## suffix ## _wrapper(void)  \
208
{                                                       \
209
    if (x86_64_hregs)                                 \
210
        prefixb ## ESI ## suffix ();                    \
211
    else                                                \
212
        prefixh ## EDX ## suffix ();                    \
213
}                                                       \
214
                                                        \
215
static void prefixb ## EDI ## suffix ## _wrapper(void)  \
216
{                                                       \
217
    if (x86_64_hregs)                                 \
218
        prefixb ## EDI ## suffix ();                    \
219
    else                                                \
220
        prefixh ## EBX ## suffix ();                    \
221
}
222

    
223
DEF_BREGS(gen_op_movb_, gen_op_movh_, _T0)
224
DEF_BREGS(gen_op_movb_, gen_op_movh_, _T1)
225
DEF_BREGS(gen_op_movl_T0_, gen_op_movh_T0_, )
226
DEF_BREGS(gen_op_movl_T1_, gen_op_movh_T1_, )
227

    
228
#else /* !TARGET_X86_64 */
229

    
230
#define NB_OP_SIZES 3
231

    
232
#define DEF_REGS(prefix, suffix) \
233
  prefix ## EAX ## suffix,\
234
  prefix ## ECX ## suffix,\
235
  prefix ## EDX ## suffix,\
236
  prefix ## EBX ## suffix,\
237
  prefix ## ESP ## suffix,\
238
  prefix ## EBP ## suffix,\
239
  prefix ## ESI ## suffix,\
240
  prefix ## EDI ## suffix,
241

    
242
#endif /* !TARGET_X86_64 */
243

    
244
static GenOpFunc *gen_op_mov_reg_T0[NB_OP_SIZES][CPU_NB_REGS] = {
245
    [OT_BYTE] = {
246
        gen_op_movb_EAX_T0,
247
        gen_op_movb_ECX_T0,
248
        gen_op_movb_EDX_T0,
249
        gen_op_movb_EBX_T0,
250
#ifdef TARGET_X86_64
251
        gen_op_movb_ESP_T0_wrapper,
252
        gen_op_movb_EBP_T0_wrapper,
253
        gen_op_movb_ESI_T0_wrapper,
254
        gen_op_movb_EDI_T0_wrapper,
255
        gen_op_movb_R8_T0,
256
        gen_op_movb_R9_T0,
257
        gen_op_movb_R10_T0,
258
        gen_op_movb_R11_T0,
259
        gen_op_movb_R12_T0,
260
        gen_op_movb_R13_T0,
261
        gen_op_movb_R14_T0,
262
        gen_op_movb_R15_T0,
263
#else
264
        gen_op_movh_EAX_T0,
265
        gen_op_movh_ECX_T0,
266
        gen_op_movh_EDX_T0,
267
        gen_op_movh_EBX_T0,
268
#endif
269
    },
270
    [OT_WORD] = {
271
        DEF_REGS(gen_op_movw_, _T0)
272
    },
273
    [OT_LONG] = {
274
        DEF_REGS(gen_op_movl_, _T0)
275
    },
276
#ifdef TARGET_X86_64
277
    [OT_QUAD] = {
278
        DEF_REGS(gen_op_movq_, _T0)
279
    },
280
#endif
281
};
282

    
283
static GenOpFunc *gen_op_mov_reg_T1[NB_OP_SIZES][CPU_NB_REGS] = {
284
    [OT_BYTE] = {
285
        gen_op_movb_EAX_T1,
286
        gen_op_movb_ECX_T1,
287
        gen_op_movb_EDX_T1,
288
        gen_op_movb_EBX_T1,
289
#ifdef TARGET_X86_64
290
        gen_op_movb_ESP_T1_wrapper,
291
        gen_op_movb_EBP_T1_wrapper,
292
        gen_op_movb_ESI_T1_wrapper,
293
        gen_op_movb_EDI_T1_wrapper,
294
        gen_op_movb_R8_T1,
295
        gen_op_movb_R9_T1,
296
        gen_op_movb_R10_T1,
297
        gen_op_movb_R11_T1,
298
        gen_op_movb_R12_T1,
299
        gen_op_movb_R13_T1,
300
        gen_op_movb_R14_T1,
301
        gen_op_movb_R15_T1,
302
#else
303
        gen_op_movh_EAX_T1,
304
        gen_op_movh_ECX_T1,
305
        gen_op_movh_EDX_T1,
306
        gen_op_movh_EBX_T1,
307
#endif
308
    },
309
    [OT_WORD] = {
310
        DEF_REGS(gen_op_movw_, _T1)
311
    },
312
    [OT_LONG] = {
313
        DEF_REGS(gen_op_movl_, _T1)
314
    },
315
#ifdef TARGET_X86_64
316
    [OT_QUAD] = {
317
        DEF_REGS(gen_op_movq_, _T1)
318
    },
319
#endif
320
};
321

    
322
static GenOpFunc *gen_op_mov_reg_A0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
323
    [0] = {
324
        DEF_REGS(gen_op_movw_, _A0)
325
    },
326
    [1] = {
327
        DEF_REGS(gen_op_movl_, _A0)
328
    },
329
#ifdef TARGET_X86_64
330
    [2] = {
331
        DEF_REGS(gen_op_movq_, _A0)
332
    },
333
#endif
334
};
335

    
336
static GenOpFunc *gen_op_mov_TN_reg[NB_OP_SIZES][2][CPU_NB_REGS] =
337
{
338
    [OT_BYTE] = {
339
        {
340
            gen_op_movl_T0_EAX,
341
            gen_op_movl_T0_ECX,
342
            gen_op_movl_T0_EDX,
343
            gen_op_movl_T0_EBX,
344
#ifdef TARGET_X86_64
345
            gen_op_movl_T0_ESP_wrapper,
346
            gen_op_movl_T0_EBP_wrapper,
347
            gen_op_movl_T0_ESI_wrapper,
348
            gen_op_movl_T0_EDI_wrapper,
349
            gen_op_movl_T0_R8,
350
            gen_op_movl_T0_R9,
351
            gen_op_movl_T0_R10,
352
            gen_op_movl_T0_R11,
353
            gen_op_movl_T0_R12,
354
            gen_op_movl_T0_R13,
355
            gen_op_movl_T0_R14,
356
            gen_op_movl_T0_R15,
357
#else
358
            gen_op_movh_T0_EAX,
359
            gen_op_movh_T0_ECX,
360
            gen_op_movh_T0_EDX,
361
            gen_op_movh_T0_EBX,
362
#endif
363
        },
364
        {
365
            gen_op_movl_T1_EAX,
366
            gen_op_movl_T1_ECX,
367
            gen_op_movl_T1_EDX,
368
            gen_op_movl_T1_EBX,
369
#ifdef TARGET_X86_64
370
            gen_op_movl_T1_ESP_wrapper,
371
            gen_op_movl_T1_EBP_wrapper,
372
            gen_op_movl_T1_ESI_wrapper,
373
            gen_op_movl_T1_EDI_wrapper,
374
            gen_op_movl_T1_R8,
375
            gen_op_movl_T1_R9,
376
            gen_op_movl_T1_R10,
377
            gen_op_movl_T1_R11,
378
            gen_op_movl_T1_R12,
379
            gen_op_movl_T1_R13,
380
            gen_op_movl_T1_R14,
381
            gen_op_movl_T1_R15,
382
#else
383
            gen_op_movh_T1_EAX,
384
            gen_op_movh_T1_ECX,
385
            gen_op_movh_T1_EDX,
386
            gen_op_movh_T1_EBX,
387
#endif
388
        },
389
    },
390
    [OT_WORD] = {
391
        {
392
            DEF_REGS(gen_op_movl_T0_, )
393
        },
394
        {
395
            DEF_REGS(gen_op_movl_T1_, )
396
        },
397
    },
398
    [OT_LONG] = {
399
        {
400
            DEF_REGS(gen_op_movl_T0_, )
401
        },
402
        {
403
            DEF_REGS(gen_op_movl_T1_, )
404
        },
405
    },
406
#ifdef TARGET_X86_64
407
    [OT_QUAD] = {
408
        {
409
            DEF_REGS(gen_op_movl_T0_, )
410
        },
411
        {
412
            DEF_REGS(gen_op_movl_T1_, )
413
        },
414
    },
415
#endif
416
};
417

    
418
static GenOpFunc *gen_op_movl_A0_reg[CPU_NB_REGS] = {
419
    DEF_REGS(gen_op_movl_A0_, )
420
};
421

    
422
static GenOpFunc *gen_op_addl_A0_reg_sN[4][CPU_NB_REGS] = {
423
    [0] = {
424
        DEF_REGS(gen_op_addl_A0_, )
425
    },
426
    [1] = {
427
        DEF_REGS(gen_op_addl_A0_, _s1)
428
    },
429
    [2] = {
430
        DEF_REGS(gen_op_addl_A0_, _s2)
431
    },
432
    [3] = {
433
        DEF_REGS(gen_op_addl_A0_, _s3)
434
    },
435
};
436

    
437
#ifdef TARGET_X86_64
438
static GenOpFunc *gen_op_movq_A0_reg[CPU_NB_REGS] = {
439
    DEF_REGS(gen_op_movq_A0_, )
440
};
441

    
442
static GenOpFunc *gen_op_addq_A0_reg_sN[4][CPU_NB_REGS] = {
443
    [0] = {
444
        DEF_REGS(gen_op_addq_A0_, )
445
    },
446
    [1] = {
447
        DEF_REGS(gen_op_addq_A0_, _s1)
448
    },
449
    [2] = {
450
        DEF_REGS(gen_op_addq_A0_, _s2)
451
    },
452
    [3] = {
453
        DEF_REGS(gen_op_addq_A0_, _s3)
454
    },
455
};
456
#endif
457

    
458
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
459
    [0] = {
460
        DEF_REGS(gen_op_cmovw_, _T1_T0)
461
    },
462
    [1] = {
463
        DEF_REGS(gen_op_cmovl_, _T1_T0)
464
    },
465
#ifdef TARGET_X86_64
466
    [2] = {
467
        DEF_REGS(gen_op_cmovq_, _T1_T0)
468
    },
469
#endif
470
};
471

    
472
static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
473
    NULL,
474
    gen_op_orl_T0_T1,
475
    NULL,
476
    NULL,
477
    gen_op_andl_T0_T1,
478
    NULL,
479
    gen_op_xorl_T0_T1,
480
    NULL,
481
};
482

    
483
#define DEF_ARITHC(SUFFIX)\
484
    {\
485
        gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
486
        gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
487
    },\
488
    {\
489
        gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
490
        gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
491
    },\
492
    {\
493
        gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
494
        gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
495
    },\
496
    {\
497
        X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
498
        X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
499
    },
500

    
501
static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
502
    DEF_ARITHC( )
503
};
504

    
505
static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
506
    DEF_ARITHC(_raw)
507
#ifndef CONFIG_USER_ONLY
508
    DEF_ARITHC(_kernel)
509
    DEF_ARITHC(_user)
510
#endif
511
};
512

    
513
static const int cc_op_arithb[8] = {
514
    CC_OP_ADDB,
515
    CC_OP_LOGICB,
516
    CC_OP_ADDB,
517
    CC_OP_SUBB,
518
    CC_OP_LOGICB,
519
    CC_OP_SUBB,
520
    CC_OP_LOGICB,
521
    CC_OP_SUBB,
522
};
523

    
524
#define DEF_CMPXCHG(SUFFIX)\
525
    gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
526
    gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
527
    gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
528
    X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
529

    
530
static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
531
    DEF_CMPXCHG( )
532
};
533

    
534
static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
535
    DEF_CMPXCHG(_raw)
536
#ifndef CONFIG_USER_ONLY
537
    DEF_CMPXCHG(_kernel)
538
    DEF_CMPXCHG(_user)
539
#endif
540
};
541

    
542
#define DEF_SHIFT(SUFFIX)\
543
    {\
544
        gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
545
        gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
546
        gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
547
        gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
548
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
549
        gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
550
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
551
        gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
552
    },\
553
    {\
554
        gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
555
        gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
556
        gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
557
        gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
558
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
559
        gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
560
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
561
        gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
562
    },\
563
    {\
564
        gen_op_roll ## SUFFIX ## _T0_T1_cc,\
565
        gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
566
        gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
567
        gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
568
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
569
        gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
570
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
571
        gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
572
    },\
573
    {\
574
        X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
575
        X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
576
        X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
577
        X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
578
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
579
        X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
580
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
581
        X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
582
    },
583

    
584
static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
585
    DEF_SHIFT( )
586
};
587

    
588
static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
589
    DEF_SHIFT(_raw)
590
#ifndef CONFIG_USER_ONLY
591
    DEF_SHIFT(_kernel)
592
    DEF_SHIFT(_user)
593
#endif
594
};
595

    
596
#define DEF_SHIFTD(SUFFIX, op)\
597
    {\
598
        NULL,\
599
        NULL,\
600
    },\
601
    {\
602
        gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
603
        gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
604
     },\
605
    {\
606
        gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
607
        gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
608
    },\
609
    {\
610
X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
611
           gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
612
    },
613

    
614
static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
615
    DEF_SHIFTD(, im)
616
};
617

    
618
static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
619
    DEF_SHIFTD(, ECX)
620
};
621

    
622
static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
623
    DEF_SHIFTD(_raw, im)
624
#ifndef CONFIG_USER_ONLY
625
    DEF_SHIFTD(_kernel, im)
626
    DEF_SHIFTD(_user, im)
627
#endif
628
};
629

    
630
static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
631
    DEF_SHIFTD(_raw, ECX)
632
#ifndef CONFIG_USER_ONLY
633
    DEF_SHIFTD(_kernel, ECX)
634
    DEF_SHIFTD(_user, ECX)
635
#endif
636
};
637

    
638
static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
639
    [0] = {
640
        gen_op_btw_T0_T1_cc,
641
        gen_op_btsw_T0_T1_cc,
642
        gen_op_btrw_T0_T1_cc,
643
        gen_op_btcw_T0_T1_cc,
644
    },
645
    [1] = {
646
        gen_op_btl_T0_T1_cc,
647
        gen_op_btsl_T0_T1_cc,
648
        gen_op_btrl_T0_T1_cc,
649
        gen_op_btcl_T0_T1_cc,
650
    },
651
#ifdef TARGET_X86_64
652
    [2] = {
653
        gen_op_btq_T0_T1_cc,
654
        gen_op_btsq_T0_T1_cc,
655
        gen_op_btrq_T0_T1_cc,
656
        gen_op_btcq_T0_T1_cc,
657
    },
658
#endif
659
};
660

    
661
static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
662
    gen_op_add_bitw_A0_T1,
663
    gen_op_add_bitl_A0_T1,
664
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
665
};
666

    
667
static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
668
    [0] = {
669
        gen_op_bsfw_T0_cc,
670
        gen_op_bsrw_T0_cc,
671
    },
672
    [1] = {
673
        gen_op_bsfl_T0_cc,
674
        gen_op_bsrl_T0_cc,
675
    },
676
#ifdef TARGET_X86_64
677
    [2] = {
678
        gen_op_bsfq_T0_cc,
679
        gen_op_bsrq_T0_cc,
680
    },
681
#endif
682
};
683

    
684
static GenOpFunc *gen_op_lds_T0_A0[3 * 4] = {
685
    gen_op_ldsb_raw_T0_A0,
686
    gen_op_ldsw_raw_T0_A0,
687
    X86_64_ONLY(gen_op_ldsl_raw_T0_A0),
688
    NULL,
689
#ifndef CONFIG_USER_ONLY
690
    gen_op_ldsb_kernel_T0_A0,
691
    gen_op_ldsw_kernel_T0_A0,
692
    X86_64_ONLY(gen_op_ldsl_kernel_T0_A0),
693
    NULL,
694

    
695
    gen_op_ldsb_user_T0_A0,
696
    gen_op_ldsw_user_T0_A0,
697
    X86_64_ONLY(gen_op_ldsl_user_T0_A0),
698
    NULL,
699
#endif
700
};
701

    
702
static GenOpFunc *gen_op_ldu_T0_A0[3 * 4] = {
703
    gen_op_ldub_raw_T0_A0,
704
    gen_op_lduw_raw_T0_A0,
705
    NULL,
706
    NULL,
707

    
708
#ifndef CONFIG_USER_ONLY
709
    gen_op_ldub_kernel_T0_A0,
710
    gen_op_lduw_kernel_T0_A0,
711
    NULL,
712
    NULL,
713

    
714
    gen_op_ldub_user_T0_A0,
715
    gen_op_lduw_user_T0_A0,
716
    NULL,
717
    NULL,
718
#endif
719
};
720

    
721
/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
722
static GenOpFunc *gen_op_ld_T0_A0[3 * 4] = {
723
    gen_op_ldub_raw_T0_A0,
724
    gen_op_lduw_raw_T0_A0,
725
    gen_op_ldl_raw_T0_A0,
726
    X86_64_ONLY(gen_op_ldq_raw_T0_A0),
727

    
728
#ifndef CONFIG_USER_ONLY
729
    gen_op_ldub_kernel_T0_A0,
730
    gen_op_lduw_kernel_T0_A0,
731
    gen_op_ldl_kernel_T0_A0,
732
    X86_64_ONLY(gen_op_ldq_kernel_T0_A0),
733

    
734
    gen_op_ldub_user_T0_A0,
735
    gen_op_lduw_user_T0_A0,
736
    gen_op_ldl_user_T0_A0,
737
    X86_64_ONLY(gen_op_ldq_user_T0_A0),
738
#endif
739
};
740

    
741
static GenOpFunc *gen_op_ld_T1_A0[3 * 4] = {
742
    gen_op_ldub_raw_T1_A0,
743
    gen_op_lduw_raw_T1_A0,
744
    gen_op_ldl_raw_T1_A0,
745
    X86_64_ONLY(gen_op_ldq_raw_T1_A0),
746

    
747
#ifndef CONFIG_USER_ONLY
748
    gen_op_ldub_kernel_T1_A0,
749
    gen_op_lduw_kernel_T1_A0,
750
    gen_op_ldl_kernel_T1_A0,
751
    X86_64_ONLY(gen_op_ldq_kernel_T1_A0),
752

    
753
    gen_op_ldub_user_T1_A0,
754
    gen_op_lduw_user_T1_A0,
755
    gen_op_ldl_user_T1_A0,
756
    X86_64_ONLY(gen_op_ldq_user_T1_A0),
757
#endif
758
};
759

    
760
static GenOpFunc *gen_op_st_T0_A0[3 * 4] = {
761
    gen_op_stb_raw_T0_A0,
762
    gen_op_stw_raw_T0_A0,
763
    gen_op_stl_raw_T0_A0,
764
    X86_64_ONLY(gen_op_stq_raw_T0_A0),
765

    
766
#ifndef CONFIG_USER_ONLY
767
    gen_op_stb_kernel_T0_A0,
768
    gen_op_stw_kernel_T0_A0,
769
    gen_op_stl_kernel_T0_A0,
770
    X86_64_ONLY(gen_op_stq_kernel_T0_A0),
771

    
772
    gen_op_stb_user_T0_A0,
773
    gen_op_stw_user_T0_A0,
774
    gen_op_stl_user_T0_A0,
775
    X86_64_ONLY(gen_op_stq_user_T0_A0),
776
#endif
777
};
778

    
779
static GenOpFunc *gen_op_st_T1_A0[3 * 4] = {
780
    NULL,
781
    gen_op_stw_raw_T1_A0,
782
    gen_op_stl_raw_T1_A0,
783
    X86_64_ONLY(gen_op_stq_raw_T1_A0),
784

    
785
#ifndef CONFIG_USER_ONLY
786
    NULL,
787
    gen_op_stw_kernel_T1_A0,
788
    gen_op_stl_kernel_T1_A0,
789
    X86_64_ONLY(gen_op_stq_kernel_T1_A0),
790

    
791
    NULL,
792
    gen_op_stw_user_T1_A0,
793
    gen_op_stl_user_T1_A0,
794
    X86_64_ONLY(gen_op_stq_user_T1_A0),
795
#endif
796
};
797

    
798
/* Emit code loading the immediate 'pc' into EIP/RIP.  On x86_64,
   pick the narrowest encoding that round-trips the value: 32-bit
   zero-extended, 32-bit sign-extended, or a full 64-bit immediate. */
static inline void gen_jmp_im(target_ulong pc)
{
#ifdef TARGET_X86_64
    if (pc == (uint32_t)pc) {
        /* value survives zero-extension from 32 bits */
        gen_op_movl_eip_im(pc);
    } else if (pc == (int32_t)pc) {
        /* value survives sign-extension from 32 bits */
        gen_op_movq_eip_im(pc);
    } else {
        /* full 64-bit immediate, passed as high/low 32-bit halves */
        gen_op_movq_eip_im64(pc >> 32, pc);
    }
#else
    gen_op_movl_eip_im(pc);
#endif
}
812

    
813
/* Emit code computing into A0 the string-source address seg:ESI,
   honouring the current address size (16/32/64 bit) and any segment
   override prefix (DS is the default source segment). */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64 bit address: a segment base is added only on override */
        if (override >= 0) {
            gen_op_movq_A0_seg(offsetof(CPUX86State,segs[override].base));
            gen_op_addq_A0_reg_sN[0][R_ESI]();
        } else {
            gen_op_movq_A0_reg[R_ESI]();
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS; /* some segment base is non-zero: add DS base */
        if (override >= 0) {
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
            gen_op_addl_A0_reg_sN[0][R_ESI]();
        } else {
            gen_op_movl_A0_reg[R_ESI]();
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg[R_ESI]();
        gen_op_andl_A0_ffff(); /* wrap the offset to 16 bits */
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
    }
}
847

    
848
/* Emit code computing into A0 the string-destination address ES:EDI.
   The destination segment is always ES (no override possible). */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64 bit address: segment base ignored */
        gen_op_movq_A0_reg[R_EDI]();
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg) {
            /* ES base may be non-zero: add it explicitly */
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
            gen_op_addl_A0_reg_sN[0][R_EDI]();
        } else {
            gen_op_movl_A0_reg[R_EDI]();
        }
    } else {
        /* 16 bit address: wrap offset and always add ES base */
        gen_op_movl_A0_reg[R_EDI]();
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
    }
}
868

    
869
/* Load into T0 the per-iteration index increment for string insns,
   indexed by operand size (the generated ops are then used to advance
   ESI/EDI; presumably the value's sign follows the direction flag —
   confirm against the op definitions). */
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
    gen_op_movl_T0_Dshiftb,
    gen_op_movl_T0_Dshiftw,
    gen_op_movl_T0_Dshiftl,
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
};
875

    
876
/* Jump to the given label if CX/ECX/RCX is non-zero; indexed by
   address size (s->aflag). */
static GenOpFunc1 *gen_op_jnz_ecx[3] = {
    gen_op_jnz_ecxw,
    gen_op_jnz_ecxl,
    X86_64_ONLY(gen_op_jnz_ecxq),
};
881

    
882
/* Jump to the given label if CX/ECX/RCX is zero; indexed by
   address size (s->aflag). */
static GenOpFunc1 *gen_op_jz_ecx[3] = {
    gen_op_jz_ecxw,
    gen_op_jz_ecxl,
    X86_64_ONLY(gen_op_jz_ecxq),
};
887

    
888
/* Decrement CX/ECX/RCX (rep-prefix counter); indexed by address size. */
static GenOpFunc *gen_op_dec_ECX[3] = {
    gen_op_decw_ECX,
    gen_op_decl_ECX,
    X86_64_ONLY(gen_op_decq_ECX),
};
893

    
894
/* Conditional jumps on the result of the last sub (compare), indexed
   by [invert][operand size]: row 0 jumps while non-zero, row 1 while
   zero — presumably the repz/repnz termination tests for SCAS/CMPS;
   confirm against GEN_REPZ2 users. */
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};
908

    
909
/* Input from port DX into T0; indexed by operand size (b/w/l). */
static GenOpFunc *gen_op_in_DX_T0[3] = {
    gen_op_inb_DX_T0,
    gen_op_inw_DX_T0,
    gen_op_inl_DX_T0,
};
914

    
915
/* Output T0 to port DX; indexed by operand size (b/w/l). */
static GenOpFunc *gen_op_out_DX_T0[3] = {
    gen_op_outb_DX_T0,
    gen_op_outw_DX_T0,
    gen_op_outl_DX_T0,
};
920

    
921
/* Input from port T0 into T1; indexed by operand size (b/w/l). */
static GenOpFunc *gen_op_in[3] = {
    gen_op_inb_T0_T1,
    gen_op_inw_T0_T1,
    gen_op_inl_T0_T1,
};
926

    
927
/* Output T1 to port T0; indexed by operand size (b/w/l). */
static GenOpFunc *gen_op_out[3] = {
    gen_op_outb_T0_T1,
    gen_op_outw_T0_T1,
    gen_op_outl_T0_T1,
};
932

    
933
/* I/O permission check for the port number held in T0; indexed by
   operand size (b/w/l).  Used by gen_check_io() below. */
static GenOpFunc *gen_check_io_T0[3] = {
    gen_op_check_iob_T0,
    gen_op_check_iow_T0,
    gen_op_check_iol_T0,
};
938

    
939
/* I/O permission check for the port number held in DX; indexed by
   operand size (b/w/l).  Used by gen_check_io() below. */
static GenOpFunc *gen_check_io_DX[3] = {
    gen_op_check_iob_DX,
    gen_op_check_iow_DX,
    gen_op_check_iol_DX,
};
944

    
945
/* Emit an I/O permission check for an IN/OUT style insn of size 'ot'.
   'use_dx' selects whether the port number is in DX or T0; 'cur_eip'
   is the insn's EIP, loaded first so a fault reports the right PC.
   The check is only needed in protected mode when CPL > IOPL, or in
   vm86 mode; otherwise no code is emitted. */
static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
{
    if (!s->pe || (s->cpl <= s->iopl && !s->vm86))
        return;

    /* make sure the runtime check sees the up-to-date CC op */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);

    if (use_dx)
        gen_check_io_DX[ot]();
    else
        gen_check_io_T0[ot]();
}
957

    
958
/* Emit one MOVS iteration: load from seg:ESI, store to ES:EDI,
   then advance ESI and EDI by the direction increment. */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot](); /* T0 <- per-iteration index delta */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64 bit address size */
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        /* 32 bit address size */
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        /* 16 bit address size */
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
979

    
980
/* Flush a statically-tracked condition-code operation into the CPU
   state and mark the translator's CC state dynamic.  Does nothing if
   the CC op is already dynamic. */
static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op == CC_OP_DYNAMIC)
        return;
    gen_op_set_cc_op(s->cc_op);
    s->cc_op = CC_OP_DYNAMIC;
}
987

    
988
/* XXX: does not work with gdbstub "ice" single step - not a
989
   serious problem */
990
/* Emit the "if ECX == 0 jump past the string insn to next_eip" test
   used by rep-prefixed string instructions.  Returns the label (l2)
   placed just before the exit jump so the caller can branch back to
   the exit path. */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1); /* ECX != 0: skip the exit jump */
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);   /* ECX == 0: continue after the insn */
    gen_set_label(l1);
    return l2;
}
1002

    
1003
/* Emit one STOS iteration: store EAX (T0) to ES:EDI, then advance
   EDI by the direction increment. */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX](); /* T0 <- EAX */
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot](); /* T0 <- per-iteration index delta */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1020

    
1021
/* Emit one LODS iteration: load from seg:ESI into AL/AX/EAX/RAX,
   then advance ESI by the direction increment. */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_op_mov_reg_T0[ot][R_EAX](); /* EAX <- T0 (size-dependent) */
    gen_op_movl_T0_Dshift[ot]();    /* T0 <- per-iteration index delta */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1038

    
1039
/* Emit one SCAS iteration: compare EAX (T0) against the value at
   ES:EDI (T1), setting the CC state, then advance EDI. */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX](); /* T0 <- EAX */
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0[ot + s->mem_index]();
    gen_op_cmpl_T0_T1_cc();      /* flags from T0 - T1 */
    gen_op_movl_T0_Dshift[ot](); /* T0 <- per-iteration index delta */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1057

    
1058
/* Emit one CMPS iteration: compare [seg:ESI] (T0) against [ES:EDI]
   (T1), setting the CC state, then advance both ESI and EDI. */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0[ot + s->mem_index]();
    gen_op_cmpl_T0_T1_cc();      /* flags from T0 - T1 */
    gen_op_movl_T0_Dshift[ot](); /* T0 <- per-iteration index delta */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1080

    
1081
/* Emit one INS iteration: read from port DX, store to ES:EDI, then
   advance EDI by the direction increment. */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    gen_op_movl_T0_0();
    /* dummy store of 0 before the I/O — presumably so that a write
       fault on [ES:EDI] is taken before the port access happens;
       TODO confirm against the memory-op semantics */
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_in_DX_T0[ot]();
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot](); /* T0 <- per-iteration index delta */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1100

    
1101
/* OUTS: load the element at DS:[ESI] and write it to port DX, then
   advance ESI by the DF step. */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_op_out_DX_T0[ot]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1118

    
1119
/* same method as Valgrind: we generate jumps to the current or the next
   instruction instead of a real loop */
1121
/* Expand a REP-prefixed string helper gen_repz_<op>: skip everything if
   ECX is already zero, run one iteration, decrement ECX and jump back
   to the current instruction (cur_eip) so single-stepping stays exact. */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
1136

    
1137
/* Like GEN_REPZ, but for SCAS/CMPS which also terminate on ZF: the
   extra 'nz' argument selects REPZ (stop when ZF clear) vs REPNZ. */
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);\
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
1154

    
1155
/* Instantiate the REP helpers: movs/stos/lods/ins/outs only test ECX;
   scas/cmps (GEN_REPZ2) additionally test ZF. */
GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
1162

    
1163
/* Condition codes in Jcc/SETcc opcode order; the low opcode bit selects
   the inverted sense (see gen_jcc: inv = b & 1). */
enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};
1173

    
1174
/* Fast conditional-jump generators indexed by [operand size][jcc_op],
   valid when the flags come from a subtraction (cmp).  NULL slots
   (JCC_O, JCC_P) cannot be derived from the sub operands alone and use
   the slow path; some 64-bit entries are disabled via BUGGY_64. */
static GenOpFunc1 *gen_jcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_jb_subb,
        gen_op_jz_subb,
        gen_op_jbe_subb,
        gen_op_js_subb,
        NULL,
        gen_op_jl_subb,
        gen_op_jle_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_jb_subw,
        gen_op_jz_subw,
        gen_op_jbe_subw,
        gen_op_js_subw,
        NULL,
        gen_op_jl_subw,
        gen_op_jle_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_jb_subl,
        gen_op_jz_subl,
        gen_op_jbe_subl,
        gen_op_js_subl,
        NULL,
        gen_op_jl_subl,
        gen_op_jle_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        BUGGY_64(gen_op_jb_subq),
        gen_op_jz_subq,
        BUGGY_64(gen_op_jbe_subq),
        gen_op_js_subq,
        NULL,
        BUGGY_64(gen_op_jl_subq),
        BUGGY_64(gen_op_jle_subq),
    },
#endif
};
1218
/* LOOP-family op generators indexed by [address size (0=16,1=32,2=64)]
   [variant]: 0=LOOPNZ, 1=LOOPZ, 2=jump-if-ECX-nonzero.  The fourth slot
   of each row is implicitly NULL; presumably the remaining LOOP-family
   opcode is handled separately by the caller — confirm at the use site. */
static GenOpFunc1 *gen_op_loop[3][4] = {
    [0] = {
        gen_op_loopnzw,
        gen_op_loopzw,
        gen_op_jnz_ecxw,
    },
    [1] = {
        gen_op_loopnzl,
        gen_op_loopzl,
        gen_op_jnz_ecxl,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_loopnzq,
        gen_op_loopzq,
        gen_op_jnz_ecxq,
    },
#endif
};
1237

    
1238
/* Slow-path condition evaluators indexed by jcc_op: compute the
   condition into T0 from the full (materialized) flags state. */
static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};
1248

    
1249
/* Fast SETcc generators indexed by [operand size][jcc_op], valid when
   flags come from a subtraction.  NULL slots (JCC_O, JCC_P) need the
   slow path (gen_setcc_slow). */
static GenOpFunc *gen_setcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        NULL,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        NULL,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        NULL,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        NULL,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,
    },
#endif
};
1293

    
1294
/* FPU ST0-op-FT0 generators indexed by the 3-bit opcode reg field;
   the compare op occupies both slots 2 and 3 (the pop variant is
   handled by the caller). */
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
    gen_op_fadd_ST0_FT0,
    gen_op_fmul_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fsub_ST0_FT0,
    gen_op_fsubr_ST0_FT0,
    gen_op_fdiv_ST0_FT0,
    gen_op_fdivr_ST0_FT0,
};
1304

    
1305
/* NOTE the exception in "r" op ordering: compared to the ST0_FT0 table,
   the sub/subr and div/divr slots are swapped. */
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
    gen_op_fadd_STN_ST0,
    gen_op_fmul_STN_ST0,
    NULL,
    NULL,
    gen_op_fsubr_STN_ST0,
    gen_op_fsub_STN_ST0,
    gen_op_fdivr_STN_ST0,
    gen_op_fdiv_STN_ST0,
};
1316

    
1317
/* Generate an ALU operation ('op' is an OP_* constant) between T0 and
   T1.  If d == OR_TMP0 the destination is the memory operand whose
   address is already in A0, otherwise register 'd'.  Updates
   s1->cc_op so flags can be recomputed lazily later. */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    GenOpFunc *gen_update_cc;

    /* load the destination operand into T0 */
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg[ot][0][d]();
    } else {
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    }
    switch(op) {
    case OP_ADCL:
    case OP_SBBL:
        /* ADC/SBB consume the current carry: materialize flags first;
           afterwards the flags state is only known dynamically */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        if (d != OR_TMP0) {
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
            gen_op_mov_reg_T0[ot][d]();
        } else {
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
        }
        s1->cc_op = CC_OP_DYNAMIC;
        goto the_end;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        s1->cc_op = CC_OP_ADDB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    case OP_SUBL:
        gen_op_subl_T0_T1();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    default: /* any unknown op is treated as a logic op */
    case OP_ANDL:
    case OP_ORL:
    case OP_XORL:
        gen_op_arith_T0_T1_cc[op]();
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_CMPL:
        /* CMP writes no destination, only flags */
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = NULL;
        break;
    }
    if (op != OP_CMPL) {
        if (d != OR_TMP0)
            gen_op_mov_reg_T0[ot][d]();
        else
            gen_op_st_T0_A0[ot + s1->mem_index]();
    }
    /* the flags update must happen after the memory write (precise
       exception support) */
    if (gen_update_cc)
        gen_update_cc();
 the_end: ;
}
1376

    
1377
/* Generate INC (c > 0) or DEC (c <= 0) of register 'd', or of the
   memory operand at A0 when d == OR_TMP0.  INC/DEC leave CF untouched,
   so the previous flags are materialized before the operation. */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg[ot][0][d]();
    else
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        gen_op_incl_T0();
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        gen_op_decl_T0();
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0[ot][d]();
    else
        gen_op_st_T0_A0[ot + s1->mem_index]();
    gen_op_update_inc_cc();
}
1399

    
1400
/* Generate shift/rotate 'op' of operand 'd' (or memory at A0 when
   d == OR_TMP0) by the count in register 's', or already in T1 when
   s == OR_TMP1. */
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg[ot][0][d]();
    else
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    if (s != OR_TMP1)
        gen_op_mov_TN_reg[ot][1][s]();
    /* for zero counts, flags are not updated, so must do it dynamically */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);

    if (d != OR_TMP0)
        gen_op_shift_T0_T1_cc[ot][op]();
    else
        gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
    if (d != OR_TMP0)
        gen_op_mov_reg_T0[ot][d]();
    s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1420

    
1421
/* Shift by an immediate count 'c': load it into T1 and reuse the
   variable-count path. */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
1427

    
1428
/* Decode the modrm memory operand and emit code that leaves its
   effective (segment-adjusted) address in A0.  Advances s->pc past any
   SIB and displacement bytes.  The outputs are fixed: *reg_ptr = OR_A0
   and *offset_ptr = 0. */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    /* an explicit segment override prefix always forces the addition */
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            /* base == 4 selects a SIB byte */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base register, 32-bit displacement */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    /* RIP-relative addressing in 64-bit mode */
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg[base]();
                if (disp != 0) {
                    if ((int32_t)disp == disp)
                        gen_op_addq_A0_im(disp);
                    else
                        gen_op_addq_A0_im64(disp >> 32, disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg[base]();
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                if ((int32_t)disp == disp)
                    gen_op_movq_A0_im(disp);
                else
                    gen_op_movq_A0_im64(disp >> 32, disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN[scale][index]();
            } else
#endif
            {
                gen_op_addl_A0_reg_sN[scale][index]();
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* EBP/ESP-based accesses default to SS, others to DS */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
            } else
#endif
            {
                gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
            }
        }
    } else {
        /* 16-bit addressing */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* absolute 16-bit displacement, no base register */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        /* the eight 16-bit base/index register combinations */
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 1:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 2:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 3:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 4:
            gen_op_movl_A0_reg[R_ESI]();
            break;
        case 5:
            gen_op_movl_A0_reg[R_EDI]();
            break;
        case 6:
            gen_op_movl_A0_reg[R_EBP]();
            break;
        default:
        case 7:
            gen_op_movl_A0_reg[R_EBX]();
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        /* 16-bit effective addresses wrap at 64K */
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* BP-based combinations default to SS */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
1617

    
1618
/* Advance s->pc past the addressing bytes (SIB/displacement) of a
   modrm memory operand without generating any code.  A register
   operand (mod == 3) consumes nothing. */
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
    if (mod == 3)
        return;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing */
        base = rm;

        if (base == 4) {
            /* SIB byte */
            code = ldub_code(s->pc++);
            base = (code & 7);
        }

        switch (mod) {
        case 0:
            if (base == 5) {
                s->pc += 4;
            }
            break;
        case 1:
            s->pc++;
            break;
        default:
        case 2:
            s->pc += 4;
            break;
        }
    } else {
        /* 16-bit addressing */
        switch (mod) {
        case 0:
            if (rm == 6) {
                s->pc += 2;
            }
            break;
        case 1:
            s->pc++;
            break;
        default:
        case 2:
            s->pc += 2;
            break;
        }
    }
}
1667

    
1668
/* used for LEA and MOV AX, mem */
1669
static void gen_add_A0_ds_seg(DisasContext *s)
1670
{
1671
    int override, must_add_seg;
1672
    must_add_seg = s->addseg;
1673
    override = R_DS;
1674
    if (s->override >= 0) {
1675
        override = s->override;
1676
        must_add_seg = 1;
1677
    } else {
1678
        override = R_DS;
1679
    }
1680
    if (must_add_seg) {
1681
#ifdef TARGET_X86_64
1682
        if (CODE64(s)) {
1683
            gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
1684
        } else
1685
#endif
1686
        {
1687
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1688
        }
1689
    }
1690
}
1691

    
1692
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
   OR_TMP0 */
/* mod == 3 means a register operand: move between 'reg' and 'rm'.
   Otherwise compute the effective address and load/store through A0. */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_mov_reg_T0[ot][rm]();
        } else {
            gen_op_mov_TN_reg[ot][0][rm]();
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0[ot][reg]();
        }
    } else {
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_st_T0_A0[ot + s->mem_index]();
        } else {
            gen_op_ld_T0_A0[ot + s->mem_index]();
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0[ot][reg]();
        }
    }
}
1723

    
1724
/* Fetch an immediate operand of size 'ot' from the instruction stream
   and advance s->pc.  Sizes beyond OT_WORD read 32 bits: x86
   immediates are at most 32 bits wide. */
static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t ret;

    switch(ot) {
    case OT_BYTE:
        ret = ldub_code(s->pc);
        s->pc++;
        break;
    case OT_WORD:
        ret = lduw_code(s->pc);
        s->pc += 2;
        break;
    default:
    case OT_LONG:
        ret = ldl_code(s->pc);
        s->pc += 4;
        break;
    }
    return ret;
}
1745

    
1746
static inline int insn_const_size(unsigned int ot)
1747
{
1748
    if (ot <= OT_LONG)
1749
        return 1 << ot;
1750
    else
1751
        return 4;
1752
}
1753

    
1754
/* Emit the end-of-TB jump to 'eip' as chaining slot 'tb_num' (0 or 1).
   A direct chained jump (goto_tb) is only allowed when the target lies
   on one of the (up to two) pages the TB already spans; otherwise the
   TB ends with a generic indirect exit. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))  {
        /* jump to same page: we can use a direct jump */
        if (tb_num == 0)
            gen_op_goto_tb0(TBPARAM(tb));
        else
            gen_op_goto_tb1(TBPARAM(tb));
        gen_jmp_im(eip);
        /* return (tb | tb_num) to the caller of the TB so it can chain */
        gen_op_movl_T0_im((long)tb + tb_num);
        gen_op_exit_tb();
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
1778

    
1779
/* Generate a conditional jump to 'val' (condition taken from the Jcc
   opcode bits 'b'; the low bit inverts it), falling through to
   'next_eip'.  With jump optimization both targets become chained
   goto_tb exits; otherwise EIP is stored and the TB ends generically. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            /* only ZF and SF can be read directly from the stored
               result; the %4 recovers the operand size index */
            switch(jcc_op) {
            case JCC_Z:
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            /* slow path: compute the condition into T0 and branch on it */
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        /* an inverted condition just swaps the two targets */
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {
        /* unoptimized path: always compute the condition slowly and
           end the TB with a generic exit */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
1907

    
1908
/* Generate SETcc: compute condition 'b' into T0 (0 or 1), using a fast
   per-cc_op generator when possible and the slow flags-based path
   otherwise.  The low bit of 'b' inverts the result. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        /* only ZF and SF can be read directly from the stored result;
           the %4 recovers the operand size index */
        switch(jcc_op) {
        case JCC_Z:
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        /* materialize the flags and evaluate the condition from them */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}
1974

    
1975
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
/* In protected mode (and not vm86) the full descriptor-checking load is
   used, which can fault, so EIP must be up to date first. */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        /* real/vm86 mode: plain selector load, no checks */
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
1997

    
1998
/* Load the 64-bit immediate 'x' into T1.  On 32-bit targets the value
   fits in a long, so a single 32-bit move is enough. */
#ifdef TARGET_X86_64
#define SVM_movq_T1_im(x) gen_op_movq_T1_im64((x) >> 32, x)
#else
#define SVM_movq_T1_im(x) gen_op_movl_T1_im(x)
#endif
2003

    
2004
/* Emit a runtime SVM I/O-intercept check when IOIO interception is
   enabled.  'type' encodes the exit information; T1 is loaded with the
   address of the next instruction so a VMEXIT can report it.  Always
   returns 0: the TB is not ended here. */
static inline int
gen_svm_check_io(DisasContext *s, target_ulong pc_start, uint64_t type)
{
#if !defined(CONFIG_USER_ONLY)
    if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        SVM_movq_T1_im(s->pc - s->cs_base);
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_geneflags();
        gen_op_svm_check_intercept_io((uint32_t)(type >> 32), (uint32_t)type);
        s->cc_op = CC_OP_DYNAMIC;
        /* FIXME: maybe we could move the io intercept vector to the TB as well
                  so we know if this is an EOB or not ... let's assume it's not
                  for now. */
    }
#endif
    return 0;
}
2023

    
2024
static inline int svm_is_rep(int prefixes)
2025
{
2026
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2027
}
2028

    
2029
/* Emit an SVM intercept check for exit code 'type' with exit-info
   'param'.  Returns 1 when the check unconditionally ends the TB
   (direct vmexit path), 0 when execution may continue. */
static inline int
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint64_t type, uint64_t param)
{
    if(!(s->flags & (INTERCEPT_SVM_MASK)))
        /* no SVM activated */
        return 0;
    switch(type) {
        /* CRx and DRx reads/writes */
        case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            SVM_movq_T1_im(param);
            gen_op_geneflags();
            gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
            /* this is a special case as we do not know if the interception occurs
               so we assume there was none */
            return 0;
        case SVM_EXIT_MSR:
            if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                    s->cc_op = CC_OP_DYNAMIC;
                }
                gen_jmp_im(pc_start - s->cs_base);
                SVM_movq_T1_im(param);
                gen_op_geneflags();
                gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
                /* this is a special case as we do not know if the interception occurs
                   so we assume there was none */
                return 0;
            }
            break;
        default:
            if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
                /* NOTE(review): the other branches set cc_op to
                   CC_OP_DYNAMIC here; this one sets CC_OP_EFLAGS after
                   materializing the flags — confirm this asymmetry is
                   intentional */
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                    s->cc_op = CC_OP_EFLAGS;
                }
                gen_jmp_im(pc_start - s->cs_base);
                SVM_movq_T1_im(param);
                gen_op_geneflags();
                gen_op_svm_vmexit(type >> 32, type);
                /* we can optimize this one so TBs don't get longer
                   than up to vmexit */
                gen_eob(s);
                return 1;
            }
    }
    return 0;
}
2083

    
2084
/* Convenience wrapper: SVM intercept check with a zero exit-info
   parameter. */
static inline int
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    return gen_svm_check_intercept_param(s, pc_start, type, 0);
}
2089

    
2090
/* Adjust the stack pointer by 'addend' using the width implied by the
   current mode (64-bit code, 32-bit stack segment, or 16-bit SP);
   common small addends use specialized ops. */
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        if (addend == 8)
            gen_op_addq_ESP_8();
        else
            gen_op_addq_ESP_im(addend);
    } else
#endif
    if (s->ss32) {
        if (addend == 2)
            gen_op_addl_ESP_2();
        else if (addend == 4)
            gen_op_addl_ESP_4();
        else
            gen_op_addl_ESP_im(addend);
    } else {
        if (addend == 2)
            gen_op_addw_ESP_2();
        else if (addend == 4)
            gen_op_addw_ESP_4();
        else
            gen_op_addw_ESP_im(addend);
    }
}
2116

    
2117
/* generate a push. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        /* long mode: flat 64-bit stack, push 8 or 2 bytes depending on
           the operand-size prefix */
        gen_op_movq_A0_reg[R_ESP]();
        if (s->dflag) {
            gen_op_subq_A0_8();
            gen_op_st_T0_A0[OT_QUAD + s->mem_index]();
        } else {
            gen_op_subq_A0_2();
            gen_op_st_T0_A0[OT_WORD + s->mem_index]();
        }
        gen_op_movq_ESP_A0();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        /* pre-decrement by the push size (2 or 4 bytes) */
        if (!s->dflag)
            gen_op_subl_A0_2();
        else
            gen_op_subl_A0_4();
        if (s->ss32) {
            if (s->addseg) {
                /* save the new ESP offset in T1 before A0 is turned
                   into a linear address by adding the SS base */
                gen_op_movl_T1_A0();
                gen_op_addl_A0_SS();
            }
        } else {
            /* 16-bit stack: wrap the offset, then add the SS base */
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_SS();
        }
        gen_op_st_T0_A0[s->dflag + 1 + s->mem_index]();
        /* write back the new stack pointer: A0 if it is still a pure
           offset (flat 32-bit stack), else the offset saved in T1 */
        if (s->ss32 && !s->addseg)
            gen_op_movl_ESP_A0();
        else
            gen_op_mov_reg_T1[s->ss32 + 1][R_ESP]();
    }
}
2156

    
2157
/* generate a push. It depends on ss32, addseg and dflag */
2158
/* slower version for T1, only used for call Ev */
2159
static void gen_push_T1(DisasContext *s)
2160
{
2161
#ifdef TARGET_X86_64
2162
    if (CODE64(s)) {
2163
        gen_op_movq_A0_reg[R_ESP]();
2164
        if (s->dflag) {
2165
            gen_op_subq_A0_8();
2166
            gen_op_st_T1_A0[OT_QUAD + s->mem_index]();
2167
        } else {
2168
            gen_op_subq_A0_2();
2169
            gen_op_st_T0_A0[OT_WORD + s->mem_index]();
2170
        }
2171
        gen_op_movq_ESP_A0();
2172
    } else
2173
#endif
2174
    {
2175
        gen_op_movl_A0_reg[R_ESP]();
2176
        if (!s->dflag)
2177
            gen_op_subl_A0_2();
2178
        else
2179
            gen_op_subl_A0_4();
2180
        if (s->ss32) {
2181
            if (s->addseg) {
2182
                gen_op_addl_A0_SS();
2183
            }
2184
        } else {
2185
            gen_op_andl_A0_ffff();
2186
            gen_op_addl_A0_SS();
2187
        }
2188
        gen_op_st_T1_A0[s->dflag + 1 + s->mem_index]();
2189

    
2190
        if (s->ss32 && !s->addseg)
2191
            gen_op_movl_ESP_A0();
2192
        else
2193
            gen_stack_update(s, (-2) << s->dflag);
2194
    }
2195
}
2196

    
2197
/* two step pop is necessary for precise exceptions */
2198
static void gen_pop_T0(DisasContext *s)
2199
{
2200
#ifdef TARGET_X86_64
2201
    if (CODE64(s)) {
2202
        gen_op_movq_A0_reg[R_ESP]();
2203
        gen_op_ld_T0_A0[(s->dflag ? OT_QUAD : OT_WORD) + s->mem_index]();
2204
    } else
2205
#endif
2206
    {
2207
        gen_op_movl_A0_reg[R_ESP]();
2208
        if (s->ss32) {
2209
            if (s->addseg)
2210
                gen_op_addl_A0_SS();
2211
        } else {
2212
            gen_op_andl_A0_ffff();
2213
            gen_op_addl_A0_SS();
2214
        }
2215
        gen_op_ld_T0_A0[s->dflag + 1 + s->mem_index]();
2216
    }
2217
}
2218

    
2219
/* Second half of a pop: bump the stack pointer by the size of the
   value that gen_pop_T0() loaded. */
static void gen_pop_update(DisasContext *s)
{
    int addend = 2 << s->dflag;

#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag)
        addend = 8;
#endif
    gen_stack_update(s, addend);
}
2230

    
2231
/* Load A0 with the linear address of the stack top and keep the raw
   ESP offset in T1. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();  /* 16-bit stack segment: wrap the offset */
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
}
2240

    
2241
/* NOTE: wrap around in 16 bit not fully handled */
2242
static void gen_pusha(DisasContext *s)
2243
{
2244
    int i;
2245
    gen_op_movl_A0_ESP();
2246
    gen_op_addl_A0_im(-16 <<  s->dflag);
2247
    if (!s->ss32)
2248
        gen_op_andl_A0_ffff();
2249
    gen_op_movl_T1_A0();
2250
    if (s->addseg)
2251
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
2252
    for(i = 0;i < 8; i++) {
2253
        gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
2254
        gen_op_st_T0_A0[OT_WORD + s->dflag + s->mem_index]();
2255
        gen_op_addl_A0_im(2 <<  s->dflag);
2256
    }
2257
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
2258
}
2259

    
2260
/* NOTE: wrap around in 16 bit not fully handled */
2261
static void gen_popa(DisasContext *s)
2262
{
2263
    int i;
2264
    gen_op_movl_A0_ESP();
2265
    if (!s->ss32)
2266
        gen_op_andl_A0_ffff();
2267
    gen_op_movl_T1_A0();
2268
    gen_op_addl_T1_im(16 <<  s->dflag);
2269
    if (s->addseg)
2270
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
2271
    for(i = 0;i < 8; i++) {
2272
        /* ESP is not reloaded */
2273
        if (i != 3) {
2274
            gen_op_ld_T0_A0[OT_WORD + s->dflag + s->mem_index]();
2275
            gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
2276
        }
2277
        gen_op_addl_A0_im(2 <<  s->dflag);
2278
    }
2279
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
2280
}
2281

    
2282
/* Generate code for the ENTER instruction: allocate a stack frame of
   'esp_addend' bytes with 'level' nested frame pointers (level is
   masked to 0..31 as per the instruction definition). */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        /* NOTE(review): uses the 32-bit A0 load even in 64-bit mode --
           confirm behaviour for RSP values above 4G */
        gen_op_movl_A0_ESP();
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();

        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            /* copy the nested frame pointers via the helper */
            gen_op_enter64_level(level, (ot == OT_QUAD));
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        /* new RSP = frame base - local area - copied frame pointers */
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_QUAD][R_ESP]();
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_ESP();
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();  /* 16-bit stack: wrap the offset */
        gen_op_movl_T1_A0();        /* T1 = new frame base offset */
        if (s->addseg)
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            /* copy the nested frame pointers via the helper */
            gen_op_enter_level(level, s->dflag);
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        /* new ESP = frame base - local area - copied frame pointers */
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
    }
}
2329

    
2330
/* Raise CPU exception 'trapno' at guest address 'cur_eip': make the
   pending condition-code state explicit, reload EIP, call the raise
   op, and terminate the translation block. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;  /* end of translation block */
}
2338

    
2339
/* an interrupt is different from an exception because of the
   privilege checks */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);  /* flush pending cc state */
    gen_jmp_im(cur_eip);
    /* the helper receives the instruction length (next_eip - cur_eip)
       as its second argument */
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3;  /* end of translation block */
}
2350

    
2351
/* Emit a debug trap at 'cur_eip': flush cc state, reload EIP, call the
   debug op and terminate the translation block. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;  /* end of translation block */
}
2359

    
2360
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);  /* flush pending cc state */
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        /* the IRQ-inhibit condition does not survive past this block */
        gen_op_reset_inhibit_irq();
    }
    /* exit priority: debugger single-step, then TF trace flag, then a
       plain TB exit with T0 = 0 (no chained jump) */
    if (s->singlestep_enabled) {
        gen_op_debug();
    } else if (s->tf) {
        gen_op_single_step();
    } else {
        gen_op_movl_T0_0();
        gen_op_exit_tb();
    }
    s->is_jmp = 3;  /* end of translation block */
}
2379

    
2380
/* generate a jump to eip. No segment change must happen before as a
2381
   direct call to the next block may occur */
2382
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2383
{
2384
    if (s->jmp_opt) {
2385
        if (s->cc_op != CC_OP_DYNAMIC) {
2386
            gen_op_set_cc_op(s->cc_op);
2387
            s->cc_op = CC_OP_DYNAMIC;
2388
        }
2389
        gen_goto_tb(s, tb_num, eip);
2390
        s->is_jmp = 3;
2391
    } else {
2392
        gen_jmp_im(eip);
2393
        gen_eob(s);
2394
    }
2395
}
2396

    
2397
/* Unconditional jump to 'eip' using direct-chaining slot 0. */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2401

    
2402
/* Load the target-width immediate 'val' into T0, using the cheaper
   32-bit move when the value fits in a sign-extended 32-bit word. */
static void gen_movtl_T0_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val != val) {
        gen_op_movq_T0_im64(val >> 32, val);
        return;
    }
#endif
    gen_op_movl_T0_im(val);
}
2414

    
2415
/* Load the target-width immediate 'val' into T1, using the cheaper
   32-bit move when the value fits in a sign-extended 32-bit word. */
static void gen_movtl_T1_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val != val) {
        gen_op_movq_T1_im64(val >> 32, val);
        return;
    }
#endif
    gen_op_movl_T1_im(val);
}
2427

    
2428
/* Add the immediate 'val' to A0 with the address width implied by the
   current code size (64-bit in long mode, 32-bit otherwise). */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_addq_A0_im(val);
        return;
    }
#endif
    gen_op_addl_A0_im(val);
}
2437

    
2438
/* Load a 64-bit value at the address in A0 into the env field whose
   offset is passed as argument; indexed with s->mem_index >> 2 to
   select the raw/kernel/user access variant (see call sites). */
static GenOpFunc1 *gen_ldq_env_A0[3] = {
    gen_op_ldq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldq_kernel_env_A0,
    gen_op_ldq_user_env_A0,
#endif
};
2445

    
2446
/* Store a 64-bit value from the env field (offset argument) to the
   address in A0; indexed with s->mem_index >> 2 like gen_ldq_env_A0. */
static GenOpFunc1 *gen_stq_env_A0[3] = {
    gen_op_stq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_stq_kernel_env_A0,
    gen_op_stq_user_env_A0,
#endif
};
2453

    
2454
/* Load a 128-bit ("octa") value at the address in A0 into the env
   field (offset argument); indexed with s->mem_index >> 2. */
static GenOpFunc1 *gen_ldo_env_A0[3] = {
    gen_op_ldo_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldo_kernel_env_A0,
    gen_op_ldo_user_env_A0,
#endif
};
2461

    
2462
/* Store a 128-bit ("octa") value from the env field (offset argument)
   to the address in A0; indexed with s->mem_index >> 2. */
static GenOpFunc1 *gen_sto_env_A0[3] = {
    gen_op_sto_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_sto_kernel_env_A0,
    gen_op_sto_user_env_A0,
#endif
};
2469

    
2470
/* Non-NULL marker for sse_op_table1 entries that are decoded by
   dedicated code in gen_sse() instead of a generic handler call. */
#define SSE_SPECIAL ((GenOpFunc2 *)1)

/* Handler pair for an op with an MMX-register and an XMM-register
   variant. */
#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
/* Handler quad for an SSE FP op in its ps/pd/ss/sd encodings (the
   order matches the prefix index used in sse_op_table1). */
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
2475

    
2476
/* Main 0x0f two-byte opcode dispatch table for MMX/SSE.  First index:
   second opcode byte.  Second index: mandatory prefix as computed in
   gen_sse() -- 0 = none, 1 = 0x66 (data), 2 = 0xF3 (repz),
   3 = 0xF2 (repnz).  NULL entries are illegal; SSE_SPECIAL entries
   are handled by hand in gen_sse(). */
static GenOpFunc2 *sse_op_table1[256][4] = {
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
    [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
    [0x2f] = { gen_op_comiss, gen_op_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
    [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
    [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
    [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
    [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
               gen_op_cvtss2sd, gen_op_cvtsd2ss },
    [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    /* cmpps etc.: the real predicate comes from sse_op_table4, the
       entry here only needs to be non-NULL */
    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
    [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
               (GenOpFunc2 *)gen_op_pshufd_xmm,
               (GenOpFunc2 *)gen_op_pshufhw_xmm,
               (GenOpFunc2 *)gen_op_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_SPECIAL }, /* emms */
    [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
    [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2598

    
2599
/* Shift-by-immediate groups 0x71/0x72/0x73: first index block selects
   the element size (0 = word, 8 = dword, 16 = qword) plus the modrm
   /reg opcode extension; second index 0 = MMX, 1 = XMM.  The dq byte
   shifts (indices 16+3/16+7) exist only in the XMM form. */
static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, gen_op_psrldq_xmm },
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, gen_op_pslldq_xmm },
};
2611

    
2612
/* Scalar int<->float conversions, four entries per row
   (ss, sd, plus the 64-bit sq forms which are NULL on 32-bit
   targets): row 0 cvtsi2*, row 1 cvtt*2si (truncating),
   row 2 cvt*2si. */
static GenOpFunc1 *sse_op_table3[4 * 3] = {
    gen_op_cvtsi2ss,
    gen_op_cvtsi2sd,
    X86_64_ONLY(gen_op_cvtsq2ss),
    X86_64_ONLY(gen_op_cvtsq2sd),

    gen_op_cvttss2si,
    gen_op_cvttsd2si,
    X86_64_ONLY(gen_op_cvttss2sq),
    X86_64_ONLY(gen_op_cvttsd2sq),

    gen_op_cvtss2si,
    gen_op_cvtsd2si,
    X86_64_ONLY(gen_op_cvtss2sq),
    X86_64_ONLY(gen_op_cvtsd2sq),
};
2628

    
2629
/* SSE compare predicates for cmpps/cmppd/cmpss/cmpsd, indexed by the
   immediate byte (0..7); each row holds the ps/pd/ss/sd variants. */
static GenOpFunc2 *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2639

    
2640
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2641
{
2642
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
2643
    int modrm, mod, rm, reg, reg_addr, offset_addr;
2644
    GenOpFunc2 *sse_op2;
2645
    GenOpFunc3 *sse_op3;
2646

    
2647
    b &= 0xff;
2648
    if (s->prefix & PREFIX_DATA)
2649
        b1 = 1;
2650
    else if (s->prefix & PREFIX_REPZ)
2651
        b1 = 2;
2652
    else if (s->prefix & PREFIX_REPNZ)
2653
        b1 = 3;
2654
    else
2655
        b1 = 0;
2656
    sse_op2 = sse_op_table1[b][b1];
2657
    if (!sse_op2)
2658
        goto illegal_op;
2659
    if (b <= 0x5f || b == 0xc6 || b == 0xc2) {
2660
        is_xmm = 1;
2661
    } else {
2662
        if (b1 == 0) {
2663
            /* MMX case */
2664
            is_xmm = 0;
2665
        } else {
2666
            is_xmm = 1;
2667
        }
2668
    }
2669
    /* simple MMX/SSE operation */
2670
    if (s->flags & HF_TS_MASK) {
2671
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2672
        return;
2673
    }
2674
    if (s->flags & HF_EM_MASK) {
2675
    illegal_op:
2676
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2677
        return;
2678
    }
2679
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2680
        goto illegal_op;
2681
    if (b == 0x77) {
2682
        /* emms */
2683
        gen_op_emms();
2684
        return;
2685
    }
2686
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2687
       the static cpu state) */
2688
    if (!is_xmm) {
2689
        gen_op_enter_mmx();
2690
    }
2691

    
2692
    modrm = ldub_code(s->pc++);
2693
    reg = ((modrm >> 3) & 7);
2694
    if (is_xmm)
2695
        reg |= rex_r;
2696
    mod = (modrm >> 6) & 3;
2697
    if (sse_op2 == SSE_SPECIAL) {
2698
        b |= (b1 << 8);
2699
        switch(b) {
2700
        case 0x0e7: /* movntq */
2701
            if (mod == 3)
2702
                goto illegal_op;
2703
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2704
            gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2705
            break;
2706
        case 0x1e7: /* movntdq */
2707
        case 0x02b: /* movntps */
2708
        case 0x12b: /* movntps */
2709
        case 0x3f0: /* lddqu */
2710
            if (mod == 3)
2711
                goto illegal_op;
2712
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2713
            gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2714
            break;
2715
        case 0x6e: /* movd mm, ea */
2716
#ifdef TARGET_X86_64
2717
            if (s->dflag == 2) {
2718
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2719
                gen_op_movq_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2720
            } else
2721
#endif
2722
            {
2723
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2724
                gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2725
            }
2726
            break;
2727
        case 0x16e: /* movd xmm, ea */
2728
#ifdef TARGET_X86_64
2729
            if (s->dflag == 2) {
2730
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2731
                gen_op_movq_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2732
            } else
2733
#endif
2734
            {
2735
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2736
                gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2737
            }
2738
            break;
2739
        case 0x6f: /* movq mm, ea */
2740
            if (mod != 3) {
2741
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2742
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2743
            } else {
2744
                rm = (modrm & 7);
2745
                gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
2746
                            offsetof(CPUX86State,fpregs[rm].mmx));
2747
            }
2748
            break;
2749
        case 0x010: /* movups */
2750
        case 0x110: /* movupd */
2751
        case 0x028: /* movaps */
2752
        case 0x128: /* movapd */
2753
        case 0x16f: /* movdqa xmm, ea */
2754
        case 0x26f: /* movdqu xmm, ea */
2755
            if (mod != 3) {
2756
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2757
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2758
            } else {
2759
                rm = (modrm & 7) | REX_B(s);
2760
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2761
                            offsetof(CPUX86State,xmm_regs[rm]));
2762
            }
2763
            break;
2764
        case 0x210: /* movss xmm, ea */
2765
            if (mod != 3) {
2766
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2767
                gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2768
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2769
                gen_op_movl_T0_0();
2770
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2771
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2772
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2773
            } else {
2774
                rm = (modrm & 7) | REX_B(s);
2775
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2776
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2777
            }
2778
            break;
2779
        case 0x310: /* movsd xmm, ea */
2780
            if (mod != 3) {
2781
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2782
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2783
                gen_op_movl_T0_0();
2784
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2785
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2786
            } else {
2787
                rm = (modrm & 7) | REX_B(s);
2788
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2789
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2790
            }
2791
            break;
2792
        case 0x012: /* movlps */
2793
        case 0x112: /* movlpd */
2794
            if (mod != 3) {
2795
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2796
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2797
            } else {
2798
                /* movhlps */
2799
                rm = (modrm & 7) | REX_B(s);
2800
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2801
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2802
            }
2803
            break;
2804
        case 0x212: /* movsldup */
2805
            if (mod != 3) {
2806
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2807
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2808
            } else {
2809
                rm = (modrm & 7) | REX_B(s);
2810
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2811
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2812
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2813
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
2814
            }
2815
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2816
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2817
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2818
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2819
            break;
2820
        case 0x312: /* movddup */
2821
            if (mod != 3) {
2822
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2823
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2824
            } else {
2825
                rm = (modrm & 7) | REX_B(s);
2826
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2827
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2828
            }
2829
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2830
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2831
            break;
2832
        case 0x016: /* movhps */
2833
        case 0x116: /* movhpd */
2834
            if (mod != 3) {
2835
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2836
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2837
            } else {
2838
                /* movlhps */
2839
                rm = (modrm & 7) | REX_B(s);
2840
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2841
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2842
            }
2843
            break;
2844
        case 0x216: /* movshdup */
2845
            if (mod != 3) {
2846
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2847
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2848
            } else {
2849
                rm = (modrm & 7) | REX_B(s);
2850
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2851
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
2852
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2853
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
2854
            }
2855
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2856
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2857
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2858
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2859
            break;
2860
        case 0x7e: /* movd ea, mm */
2861
#ifdef TARGET_X86_64
2862
            if (s->dflag == 2) {
2863
                gen_op_movq_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2864
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2865
            } else
2866
#endif
2867
            {
2868
                gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2869
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2870
            }
2871
            break;
2872
        case 0x17e: /* movd ea, xmm */
2873
#ifdef TARGET_X86_64
2874
            if (s->dflag == 2) {
2875
                gen_op_movq_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2876
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2877
            } else
2878
#endif
2879
            {
2880
                gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2881
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2882
            }
2883
            break;
2884
        case 0x27e: /* movq xmm, ea */
2885
            if (mod != 3) {
2886
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2887
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2888
            } else {
2889
                rm = (modrm & 7) | REX_B(s);
2890
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2891
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2892
            }
2893
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2894
            break;
2895
        case 0x7f: /* movq ea, mm */
2896
            if (mod != 3) {
2897
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2898
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2899
            } else {
2900
                rm = (modrm & 7);
2901
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2902
                            offsetof(CPUX86State,fpregs[reg].mmx));
2903
            }
2904
            break;
2905
        case 0x011: /* movups */
2906
        case 0x111: /* movupd */
2907
        case 0x029: /* movaps */
2908
        case 0x129: /* movapd */
2909
        case 0x17f: /* movdqa ea, xmm */
2910
        case 0x27f: /* movdqu ea, xmm */
2911
            if (mod != 3) {
2912
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2913
                gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2914
            } else {
2915
                rm = (modrm & 7) | REX_B(s);
2916
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
2917
                            offsetof(CPUX86State,xmm_regs[reg]));
2918
            }
2919
            break;
2920
        case 0x211: /* movss ea, xmm */
2921
            if (mod != 3) {
2922
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2923
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2924
                gen_op_st_T0_A0[OT_LONG + s->mem_index]();
2925
            } else {
2926
                rm = (modrm & 7) | REX_B(s);
2927
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
2928
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2929
            }
2930
            break;
2931
        case 0x311: /* movsd ea, xmm */
2932
            if (mod != 3) {
2933
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2934
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2935
            } else {
2936
                rm = (modrm & 7) | REX_B(s);
2937
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2938
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2939
            }
2940
            break;
2941
        case 0x013: /* movlps */
2942
        case 0x113: /* movlpd */
2943
            if (mod != 3) {
2944
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2945
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2946
            } else {
2947
                goto illegal_op;
2948
            }
2949
            break;
2950
        case 0x017: /* movhps */
2951
        case 0x117: /* movhpd */
2952
            if (mod != 3) {
2953
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2954
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2955
            } else {
2956
                goto illegal_op;
2957
            }
2958
            break;
2959
        case 0x71: /* shift mm, im */
2960
        case 0x72:
2961
        case 0x73:
2962
        case 0x171: /* shift xmm, im */
2963
        case 0x172:
2964
        case 0x173:
2965
            val = ldub_code(s->pc++);
2966
            if (is_xmm) {
2967
                gen_op_movl_T0_im(val);
2968
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2969
                gen_op_movl_T0_0();
2970
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
2971
                op1_offset = offsetof(CPUX86State,xmm_t0);
2972
            } else {
2973
                gen_op_movl_T0_im(val);
2974
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
2975
                gen_op_movl_T0_0();
2976
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
2977
                op1_offset = offsetof(CPUX86State,mmx_t0);
2978
            }
2979
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
2980
            if (!sse_op2)
2981
                goto illegal_op;
2982
            if (is_xmm) {
2983
                rm = (modrm & 7) | REX_B(s);
2984
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2985
            } else {
2986
                rm = (modrm & 7);
2987
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2988
            }
2989
            sse_op2(op2_offset, op1_offset);
2990
            break;
2991
        case 0x050: /* movmskps */
2992
            rm = (modrm & 7) | REX_B(s);
2993
            gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
2994
            gen_op_mov_reg_T0[OT_LONG][reg]();
2995
            break;
2996
        case 0x150: /* movmskpd */
2997
            rm = (modrm & 7) | REX_B(s);
2998
            gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
2999
            gen_op_mov_reg_T0[OT_LONG][reg]();
3000
            break;
3001
        case 0x02a: /* cvtpi2ps */
3002
        case 0x12a: /* cvtpi2pd */
3003
            gen_op_enter_mmx();
3004
            if (mod != 3) {
3005
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3006
                op2_offset = offsetof(CPUX86State,mmx_t0);
3007
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
3008
            } else {
3009
                rm = (modrm & 7);
3010
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3011
            }
3012
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3013
            switch(b >> 8) {
3014
            case 0x0:
3015
                gen_op_cvtpi2ps(op1_offset, op2_offset);
3016
                break;
3017
            default:
3018
            case 0x1:
3019
                gen_op_cvtpi2pd(op1_offset, op2_offset);
3020
                break;
3021
            }
3022
            break;
3023
        case 0x22a: /* cvtsi2ss */
3024
        case 0x32a: /* cvtsi2sd */
3025
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3026
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3027
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3028
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
3029
            break;
3030
        case 0x02c: /* cvttps2pi */
3031
        case 0x12c: /* cvttpd2pi */
3032
        case 0x02d: /* cvtps2pi */
3033
        case 0x12d: /* cvtpd2pi */
3034
            gen_op_enter_mmx();
3035
            if (mod != 3) {
3036
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3037
                op2_offset = offsetof(CPUX86State,xmm_t0);
3038
                gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
3039
            } else {
3040
                rm = (modrm & 7) | REX_B(s);
3041
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3042
            }
3043
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3044
            switch(b) {
3045
            case 0x02c:
3046
                gen_op_cvttps2pi(op1_offset, op2_offset);
3047
                break;
3048
            case 0x12c:
3049
                gen_op_cvttpd2pi(op1_offset, op2_offset);
3050
                break;
3051
            case 0x02d:
3052
                gen_op_cvtps2pi(op1_offset, op2_offset);
3053
                break;
3054
            case 0x12d:
3055
                gen_op_cvtpd2pi(op1_offset, op2_offset);
3056
                break;
3057
            }
3058
            break;
3059
        case 0x22c: /* cvttss2si */
3060
        case 0x32c: /* cvttsd2si */
3061
        case 0x22d: /* cvtss2si */
3062
        case 0x32d: /* cvtsd2si */
3063
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3064
            if (mod != 3) {
3065
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3066
                if ((b >> 8) & 1) {
3067
                    gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3068
                } else {
3069
                    gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
3070
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3071
                }
3072
                op2_offset = offsetof(CPUX86State,xmm_t0);
3073
            } else {
3074
                rm = (modrm & 7) | REX_B(s);
3075
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3076
            }
3077
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3078
                          (b & 1) * 4](op2_offset);
3079
            gen_op_mov_reg_T0[ot][reg]();
3080
            break;
3081
        case 0xc4: /* pinsrw */
3082
        case 0x1c4:
3083
            s->rip_offset = 1;
3084
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3085
            val = ldub_code(s->pc++);
3086
            if (b1) {
3087
                val &= 7;
3088
                gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
3089
            } else {
3090
                val &= 3;
3091
                gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
3092
            }
3093
            break;
3094
        case 0xc5: /* pextrw */
3095
        case 0x1c5:
3096
            if (mod != 3)
3097
                goto illegal_op;
3098
            val = ldub_code(s->pc++);
3099
            if (b1) {
3100
                val &= 7;
3101
                rm = (modrm & 7) | REX_B(s);
3102
                gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
3103
            } else {
3104
                val &= 3;
3105
                rm = (modrm & 7);
3106
                gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
3107
            }
3108
            reg = ((modrm >> 3) & 7) | rex_r;
3109
            gen_op_mov_reg_T0[OT_LONG][reg]();
3110
            break;
3111
        case 0x1d6: /* movq ea, xmm */
3112
            if (mod != 3) {
3113
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3114
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3115
            } else {
3116
                rm = (modrm & 7) | REX_B(s);
3117
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3118
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3119
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3120
            }
3121
            break;
3122
        case 0x2d6: /* movq2dq */
3123
            gen_op_enter_mmx();
3124
            rm = (modrm & 7);
3125
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3126
                        offsetof(CPUX86State,fpregs[rm].mmx));
3127
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3128
            break;
3129
        case 0x3d6: /* movdq2q */
3130
            gen_op_enter_mmx();
3131
            rm = (modrm & 7) | REX_B(s);
3132
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3133
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3134
            break;
3135
        case 0xd7: /* pmovmskb */
3136
        case 0x1d7:
3137
            if (mod != 3)
3138
                goto illegal_op;
3139
            if (b1) {
3140
                rm = (modrm & 7) | REX_B(s);
3141
                gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
3142
            } else {
3143
                rm = (modrm & 7);
3144
                gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
3145
            }
3146
            reg = ((modrm >> 3) & 7) | rex_r;
3147
            gen_op_mov_reg_T0[OT_LONG][reg]();
3148
            break;
3149
        default:
3150
            goto illegal_op;
3151
        }
3152
    } else {
3153
        /* generic MMX or SSE operation */
3154
        switch(b) {
3155
        case 0xf7:
3156
            /* maskmov : we must prepare A0 */
3157
            if (mod != 3)
3158
                goto illegal_op;
3159
#ifdef TARGET_X86_64
3160
            if (s->aflag == 2) {
3161
                gen_op_movq_A0_reg[R_EDI]();
3162
            } else
3163
#endif
3164
            {
3165
                gen_op_movl_A0_reg[R_EDI]();
3166
                if (s->aflag == 0)
3167
                    gen_op_andl_A0_ffff();
3168
            }
3169
            gen_add_A0_ds_seg(s);
3170
            break;
3171
        case 0x70: /* pshufx insn */
3172
        case 0xc6: /* pshufx insn */
3173
        case 0xc2: /* compare insns */
3174
            s->rip_offset = 1;
3175
            break;
3176
        default:
3177
            break;
3178
        }
3179
        if (is_xmm) {
3180
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3181
            if (mod != 3) {
3182
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3183
                op2_offset = offsetof(CPUX86State,xmm_t0);
3184
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3185
                                b == 0xc2)) {
3186
                    /* specific case for SSE single instructions */
3187
                    if (b1 == 2) {
3188
                        /* 32 bit access */
3189
                        gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
3190
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3191
                    } else {
3192
                        /* 64 bit access */
3193
                        gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3194
                    }
3195
                } else {
3196
                    gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
3197
                }
3198
            } else {
3199
                rm = (modrm & 7) | REX_B(s);
3200
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3201
            }
3202
        } else {
3203
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3204
            if (mod != 3) {
3205
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3206
                op2_offset = offsetof(CPUX86State,mmx_t0);
3207
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
3208
            } else {
3209
                rm = (modrm & 7);
3210
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3211
            }
3212
        }
3213
        switch(b) {
3214
        case 0x70: /* pshufx insn */
3215
        case 0xc6: /* pshufx insn */
3216
            val = ldub_code(s->pc++);
3217
            sse_op3 = (GenOpFunc3 *)sse_op2;
3218
            sse_op3(op1_offset, op2_offset, val);
3219
            break;
3220
        case 0xc2:
3221
            /* compare insns */
3222
            val = ldub_code(s->pc++);
3223
            if (val >= 8)
3224
                goto illegal_op;
3225
            sse_op2 = sse_op_table4[val][b1];
3226
            sse_op2(op1_offset, op2_offset);
3227
            break;
3228
        default:
3229
            sse_op2(op1_offset, op2_offset);
3230
            break;
3231
        }
3232
        if (b == 0x2e || b == 0x2f) {
3233
            s->cc_op = CC_OP_EFLAGS;
3234
        }
3235
    }
3236
}
3237

    
3238

    
3239
/* convert one instruction. s->is_jmp is set if the translation must
3240
   be stopped. Return the next pc value */
3241
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3242
{
3243
    int b, prefixes, aflag, dflag;
3244
    int shift, ot;
3245
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3246
    target_ulong next_eip, tval;
3247
    int rex_w, rex_r;
3248

    
3249
    s->pc = pc_start;
3250
    prefixes = 0;
3251
    aflag = s->code32;
3252
    dflag = s->code32;
3253
    s->override = -1;
3254
    rex_w = -1;
3255
    rex_r = 0;
3256
#ifdef TARGET_X86_64
3257
    s->rex_x = 0;
3258
    s->rex_b = 0;
3259
    x86_64_hregs = 0;
3260
#endif
3261
    s->rip_offset = 0; /* for relative ip address */
3262
 next_byte:
3263
    b = ldub_code(s->pc);
3264
    s->pc++;
3265
    /* check prefixes */
3266
#ifdef TARGET_X86_64
3267
    if (CODE64(s)) {
3268
        switch (b) {
3269
        case 0xf3:
3270
            prefixes |= PREFIX_REPZ;
3271
            goto next_byte;
3272
        case 0xf2:
3273
            prefixes |= PREFIX_REPNZ;
3274
            goto next_byte;
3275
        case 0xf0:
3276
            prefixes |= PREFIX_LOCK;
3277
            goto next_byte;
3278
        case 0x2e:
3279
            s->override = R_CS;
3280
            goto next_byte;
3281
        case 0x36:
3282
            s->override = R_SS;
3283
            goto next_byte;
3284
        case 0x3e:
3285
            s->override = R_DS;
3286
            goto next_byte;
3287
        case 0x26:
3288
            s->override = R_ES;
3289
            goto next_byte;
3290
        case 0x64:
3291
            s->override = R_FS;
3292
            goto next_byte;
3293
        case 0x65:
3294
            s->override = R_GS;
3295
            goto next_byte;
3296
        case 0x66:
3297
            prefixes |= PREFIX_DATA;
3298
            goto next_byte;
3299
        case 0x67:
3300
            prefixes |= PREFIX_ADR;
3301
            goto next_byte;
3302
        case 0x40 ... 0x4f:
3303
            /* REX prefix */
3304
            rex_w = (b >> 3) & 1;
3305
            rex_r = (b & 0x4) << 1;
3306
            s->rex_x = (b & 0x2) << 2;
3307
            REX_B(s) = (b & 0x1) << 3;
3308
            x86_64_hregs = 1; /* select uniform byte register addressing */
3309
            goto next_byte;
3310
        }
3311
        if (rex_w == 1) {
3312
            /* 0x66 is ignored if rex.w is set */
3313
            dflag = 2;
3314
        } else {
3315
            if (prefixes & PREFIX_DATA)
3316
                dflag ^= 1;
3317
        }
3318
        if (!(prefixes & PREFIX_ADR))
3319
            aflag = 2;
3320
    } else
3321
#endif
3322
    {
3323
        switch (b) {
3324
        case 0xf3:
3325
            prefixes |= PREFIX_REPZ;
3326
            goto next_byte;
3327
        case 0xf2:
3328
            prefixes |= PREFIX_REPNZ;
3329
            goto next_byte;
3330
        case 0xf0:
3331
            prefixes |= PREFIX_LOCK;
3332
            goto next_byte;
3333
        case 0x2e:
3334
            s->override = R_CS;
3335
            goto next_byte;
3336
        case 0x36:
3337
            s->override = R_SS;
3338
            goto next_byte;
3339
        case 0x3e:
3340
            s->override = R_DS;
3341
            goto next_byte;
3342
        case 0x26:
3343
            s->override = R_ES;
3344
            goto next_byte;
3345
        case 0x64:
3346
            s->override = R_FS;
3347
            goto next_byte;
3348
        case 0x65:
3349
            s->override = R_GS;
3350
            goto next_byte;
3351
        case 0x66:
3352
            prefixes |= PREFIX_DATA;
3353
            goto next_byte;
3354
        case 0x67:
3355
            prefixes |= PREFIX_ADR;
3356
            goto next_byte;
3357
        }
3358
        if (prefixes & PREFIX_DATA)
3359
            dflag ^= 1;
3360
        if (prefixes & PREFIX_ADR)
3361
            aflag ^= 1;
3362
    }
3363

    
3364
    s->prefix = prefixes;
3365
    s->aflag = aflag;
3366
    s->dflag = dflag;
3367

    
3368
    /* lock generation */
3369
    if (prefixes & PREFIX_LOCK)
3370
        gen_op_lock();
3371

    
3372
    /* now check op code */
3373
 reswitch:
3374
    switch(b) {
3375
    case 0x0f:
3376
        /**************************/
3377
        /* extended op code */
3378
        b = ldub_code(s->pc++) | 0x100;
3379
        goto reswitch;
3380

    
3381
        /**************************/
3382
        /* arith & logic */
3383
    case 0x00 ... 0x05:
3384
    case 0x08 ... 0x0d:
3385
    case 0x10 ... 0x15:
3386
    case 0x18 ... 0x1d:
3387
    case 0x20 ... 0x25:
3388
    case 0x28 ... 0x2d:
3389
    case 0x30 ... 0x35:
3390
    case 0x38 ... 0x3d:
3391
        {
3392
            int op, f, val;
3393
            op = (b >> 3) & 7;
3394
            f = (b >> 1) & 3;
3395

    
3396
            if ((b & 1) == 0)
3397
                ot = OT_BYTE;
3398
            else
3399
                ot = dflag + OT_WORD;
3400

    
3401
            switch(f) {
3402
            case 0: /* OP Ev, Gv */
3403
                modrm = ldub_code(s->pc++);
3404
                reg = ((modrm >> 3) & 7) | rex_r;
3405
                mod = (modrm >> 6) & 3;
3406
                rm = (modrm & 7) | REX_B(s);
3407
                if (mod != 3) {
3408
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3409
                    opreg = OR_TMP0;
3410
                } else if (op == OP_XORL && rm == reg) {
3411
                xor_zero:
3412
                    /* xor reg, reg optimisation */
3413
                    gen_op_movl_T0_0();
3414
                    s->cc_op = CC_OP_LOGICB + ot;
3415
                    gen_op_mov_reg_T0[ot][reg]();
3416
                    gen_op_update1_cc();
3417
                    break;
3418
                } else {
3419
                    opreg = rm;
3420
                }
3421
                gen_op_mov_TN_reg[ot][1][reg]();
3422
                gen_op(s, op, ot, opreg);
3423
                break;
3424
            case 1: /* OP Gv, Ev */
3425
                modrm = ldub_code(s->pc++);
3426
                mod = (modrm >> 6) & 3;
3427
                reg = ((modrm >> 3) & 7) | rex_r;
3428
                rm = (modrm & 7) | REX_B(s);
3429
                if (mod != 3) {
3430
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3431
                    gen_op_ld_T1_A0[ot + s->mem_index]();
3432
                } else if (op == OP_XORL && rm == reg) {
3433
                    goto xor_zero;
3434
                } else {
3435
                    gen_op_mov_TN_reg[ot][1][rm]();
3436
                }
3437
                gen_op(s, op, ot, reg);
3438
                break;
3439
            case 2: /* OP A, Iv */
3440
                val = insn_get(s, ot);
3441
                gen_op_movl_T1_im(val);
3442
                gen_op(s, op, ot, OR_EAX);
3443
                break;
3444
            }
3445
        }
3446
        break;
3447

    
3448
    case 0x80: /* GRP1 */
3449
    case 0x81:
3450
    case 0x82:
3451
    case 0x83:
3452
        {
3453
            int val;
3454

    
3455
            if ((b & 1) == 0)
3456
                ot = OT_BYTE;
3457
            else
3458
                ot = dflag + OT_WORD;
3459

    
3460
            modrm = ldub_code(s->pc++);
3461
            mod = (modrm >> 6) & 3;
3462
            rm = (modrm & 7) | REX_B(s);
3463
            op = (modrm >> 3) & 7;
3464

    
3465
            if (mod != 3) {
3466
                if (b == 0x83)
3467
                    s->rip_offset = 1;
3468
                else
3469
                    s->rip_offset = insn_const_size(ot);
3470
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3471
                opreg = OR_TMP0;
3472
            } else {
3473
                opreg = rm;
3474
            }
3475

    
3476
            switch(b) {
3477
            default:
3478
            case 0x80:
3479
            case 0x81:
3480
            case 0x82:
3481
                val = insn_get(s, ot);
3482
                break;
3483
            case 0x83:
3484
                val = (int8_t)insn_get(s, OT_BYTE);
3485
                break;
3486
            }
3487
            gen_op_movl_T1_im(val);
3488
            gen_op(s, op, ot, opreg);
3489
        }
3490
        break;
3491

    
3492
        /**************************/
3493
        /* inc, dec, and other misc arith */
3494
    case 0x40 ... 0x47: /* inc Gv */
3495
        ot = dflag ? OT_LONG : OT_WORD;
3496
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
3497
        break;
3498
    case 0x48 ... 0x4f: /* dec Gv */
3499
        ot = dflag ? OT_LONG : OT_WORD;
3500
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
3501
        break;
3502
    case 0xf6: /* GRP3 */
3503
    case 0xf7:
3504
        if ((b & 1) == 0)
3505
            ot = OT_BYTE;
3506
        else
3507
            ot = dflag + OT_WORD;
3508

    
3509
        modrm = ldub_code(s->pc++);
3510
        mod = (modrm >> 6) & 3;
3511
        rm = (modrm & 7) | REX_B(s);
3512
        op = (modrm >> 3) & 7;
3513
        if (mod != 3) {
3514
            if (op == 0)
3515
                s->rip_offset = insn_const_size(ot);
3516
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3517
            gen_op_ld_T0_A0[ot + s->mem_index]();
3518
        } else {
3519
            gen_op_mov_TN_reg[ot][0][rm]();
3520
        }
3521

    
3522
        switch(op) {
3523
        case 0: /* test */
3524
            val = insn_get(s, ot);
3525
            gen_op_movl_T1_im(val);
3526
            gen_op_testl_T0_T1_cc();
3527
            s->cc_op = CC_OP_LOGICB + ot;
3528
            break;
3529
        case 2: /* not */
3530
            gen_op_notl_T0();
3531
            if (mod != 3) {
3532
                gen_op_st_T0_A0[ot + s->mem_index]();
3533
            } else {
3534
                gen_op_mov_reg_T0[ot][rm]();
3535
            }
3536
            break;
3537
        case 3: /* neg */
3538
            gen_op_negl_T0();
3539
            if (mod != 3) {
3540
                gen_op_st_T0_A0[ot + s->mem_index]();
3541
            } else {
3542
                gen_op_mov_reg_T0[ot][rm]();
3543
            }
3544
            gen_op_update_neg_cc();
3545
            s->cc_op = CC_OP_SUBB + ot;
3546
            break;
3547
        case 4: /* mul */
3548
            switch(ot) {
3549
            case OT_BYTE:
3550
                gen_op_mulb_AL_T0();
3551
                s->cc_op = CC_OP_MULB;
3552
                break;
3553
            case OT_WORD:
3554
                gen_op_mulw_AX_T0();
3555
                s->cc_op = CC_OP_MULW;
3556
                break;
3557
            default:
3558
            case OT_LONG:
3559
                gen_op_mull_EAX_T0();
3560
                s->cc_op = CC_OP_MULL;
3561
                break;
3562
#ifdef TARGET_X86_64
3563
            case OT_QUAD:
3564
                gen_op_mulq_EAX_T0();
3565
                s->cc_op = CC_OP_MULQ;
3566
                break;
3567
#endif
3568
            }
3569
            break;
3570
        case 5: /* imul */
3571
            switch(ot) {
3572
            case OT_BYTE:
3573
                gen_op_imulb_AL_T0();
3574
                s->cc_op = CC_OP_MULB;
3575
                break;
3576
            case OT_WORD:
3577
                gen_op_imulw_AX_T0();
3578
                s->cc_op = CC_OP_MULW;
3579
                break;
3580
            default:
3581
            case OT_LONG:
3582
                gen_op_imull_EAX_T0();
3583
                s->cc_op = CC_OP_MULL;
3584
                break;
3585
#ifdef TARGET_X86_64
3586
            case OT_QUAD:
3587
                gen_op_imulq_EAX_T0();
3588
                s->cc_op = CC_OP_MULQ;
3589
                break;
3590
#endif
3591
            }
3592
            break;
3593
        case 6: /* div */
3594
            switch(ot) {
3595
            case OT_BYTE:
3596
                gen_jmp_im(pc_start - s->cs_base);
3597
                gen_op_divb_AL_T0();
3598
                break;
3599
            case OT_WORD:
3600
                gen_jmp_im(pc_start - s->cs_base);
3601
                gen_op_divw_AX_T0();
3602
                break;
3603
            default:
3604
            case OT_LONG:
3605
                gen_jmp_im(pc_start - s->cs_base);
3606
                gen_op_divl_EAX_T0();
3607
                break;
3608
#ifdef TARGET_X86_64
3609
            case OT_QUAD:
3610
                gen_jmp_im(pc_start - s->cs_base);
3611
                gen_op_divq_EAX_T0();
3612
                break;
3613
#endif
3614
            }
3615
            break;
3616
        case 7: /* idiv */
3617
            switch(ot) {
3618
            case OT_BYTE:
3619
                gen_jmp_im(pc_start - s->cs_base);
3620
                gen_op_idivb_AL_T0();
3621
                break;
3622
            case OT_WORD:
3623
                gen_jmp_im(pc_start - s->cs_base);
3624
                gen_op_idivw_AX_T0();
3625
                break;
3626
            default:
3627
            case OT_LONG:
3628
                gen_jmp_im(pc_start - s->cs_base);
3629
                gen_op_idivl_EAX_T0();
3630
                break;
3631
#ifdef TARGET_X86_64
3632
            case OT_QUAD:
3633
                gen_jmp_im(pc_start - s->cs_base);
3634
                gen_op_idivq_EAX_T0();
3635
                break;
3636
#endif
3637
            }
3638
            break;
3639
        default:
3640
            goto illegal_op;
3641
        }
3642
        break;
3643

    
3644
    case 0xfe: /* GRP4 */
3645
    case 0xff: /* GRP5 */
3646
        if ((b & 1) == 0)
3647
            ot = OT_BYTE;
3648
        else
3649
            ot = dflag + OT_WORD;
3650

    
3651
        modrm = ldub_code(s->pc++);
3652
        mod = (modrm >> 6) & 3;
3653
        rm = (modrm & 7) | REX_B(s);
3654
        op = (modrm >> 3) & 7;
3655
        if (op >= 2 && b == 0xfe) {
3656
            goto illegal_op;
3657
        }
3658
        if (CODE64(s)) {
3659
            if (op == 2 || op == 4) {
3660
                /* operand size for jumps is 64 bit */
3661
                ot = OT_QUAD;
3662
            } else if (op == 3 || op == 5) {
3663
                /* for call calls, the operand is 16 or 32 bit, even
3664
                   in long mode */
3665
                ot = dflag ? OT_LONG : OT_WORD;
3666
            } else if (op == 6) {
3667
                /* default push size is 64 bit */
3668
                ot = dflag ? OT_QUAD : OT_WORD;
3669
            }
3670
        }
3671
        if (mod != 3) {
3672
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3673
            if (op >= 2 && op != 3 && op != 5)
3674
                gen_op_ld_T0_A0[ot + s->mem_index]();
3675
        } else {
3676
            gen_op_mov_TN_reg[ot][0][rm]();
3677
        }
3678

    
3679
        switch(op) {
3680
        case 0: /* inc Ev */
3681
            if (mod != 3)
3682
                opreg = OR_TMP0;
3683
            else
3684
                opreg = rm;
3685
            gen_inc(s, ot, opreg, 1);
3686
            break;
3687
        case 1: /* dec Ev */
3688
            if (mod != 3)
3689
                opreg = OR_TMP0;
3690
            else
3691
                opreg = rm;
3692
            gen_inc(s, ot, opreg, -1);
3693
            break;
3694
        case 2: /* call Ev */
3695
            /* XXX: optimize if memory (no 'and' is necessary) */
3696
            if (s->dflag == 0)
3697
                gen_op_andl_T0_ffff();
3698
            next_eip = s->pc - s->cs_base;
3699
            gen_movtl_T1_im(next_eip);
3700
            gen_push_T1(s);
3701
            gen_op_jmp_T0();
3702
            gen_eob(s);
3703
            break;
3704
        case 3: /* lcall Ev */
3705
            gen_op_ld_T1_A0[ot + s->mem_index]();
3706
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3707
            gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3708
        do_lcall:
3709
            if (s->pe && !s->vm86) {
3710
                if (s->cc_op != CC_OP_DYNAMIC)
3711
                    gen_op_set_cc_op(s->cc_op);
3712
                gen_jmp_im(pc_start - s->cs_base);
3713
                gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3714
            } else {
3715
                gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3716
            }
3717
            gen_eob(s);
3718
            break;
3719
        case 4: /* jmp Ev */
3720
            if (s->dflag == 0)
3721
                gen_op_andl_T0_ffff();
3722
            gen_op_jmp_T0();
3723
            gen_eob(s);
3724
            break;
3725
        case 5: /* ljmp Ev */
3726
            gen_op_ld_T1_A0[ot + s->mem_index]();
3727
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3728
            gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3729
        do_ljmp:
3730
            if (s->pe && !s->vm86) {
3731
                if (s->cc_op != CC_OP_DYNAMIC)
3732
                    gen_op_set_cc_op(s->cc_op);
3733
                gen_jmp_im(pc_start - s->cs_base);
3734
                gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3735
            } else {
3736
                gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3737
                gen_op_movl_T0_T1();
3738
                gen_op_jmp_T0();
3739
            }
3740
            gen_eob(s);
3741
            break;
3742
        case 6: /* push Ev */
3743
            gen_push_T0(s);
3744
            break;
3745
        default:
3746
            goto illegal_op;
3747
        }
3748
        break;
3749

    
3750
    case 0x84: /* test Ev, Gv */
3751
    case 0x85:
3752
        if ((b & 1) == 0)
3753
            ot = OT_BYTE;
3754
        else
3755
            ot = dflag + OT_WORD;
3756

    
3757
        modrm = ldub_code(s->pc++);
3758
        mod = (modrm >> 6) & 3;
3759
        rm = (modrm & 7) | REX_B(s);
3760
        reg = ((modrm >> 3) & 7) | rex_r;
3761

    
3762
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3763
        gen_op_mov_TN_reg[ot][1][reg]();
3764
        gen_op_testl_T0_T1_cc();
3765
        s->cc_op = CC_OP_LOGICB + ot;
3766
        break;
3767

    
3768
    case 0xa8: /* test eAX, Iv */
3769
    case 0xa9:
3770
        if ((b & 1) == 0)
3771
            ot = OT_BYTE;
3772
        else
3773
            ot = dflag + OT_WORD;
3774
        val = insn_get(s, ot);
3775

    
3776
        gen_op_mov_TN_reg[ot][0][OR_EAX]();
3777
        gen_op_movl_T1_im(val);
3778
        gen_op_testl_T0_T1_cc();
3779
        s->cc_op = CC_OP_LOGICB + ot;
3780
        break;
3781

    
3782
    case 0x98: /* CWDE/CBW */
3783
#ifdef TARGET_X86_64
3784
        if (dflag == 2) {
3785
            gen_op_movslq_RAX_EAX();
3786
        } else
3787
#endif
3788
        if (dflag == 1)
3789
            gen_op_movswl_EAX_AX();
3790
        else
3791
            gen_op_movsbw_AX_AL();
3792
        break;
3793
    case 0x99: /* CDQ/CWD */
3794
#ifdef TARGET_X86_64
3795
        if (dflag == 2) {
3796
            gen_op_movsqo_RDX_RAX();
3797
        } else
3798
#endif
3799
        if (dflag == 1)
3800
            gen_op_movslq_EDX_EAX();
3801
        else
3802
            gen_op_movswl_DX_AX();
3803
        break;
3804
    case 0x1af: /* imul Gv, Ev */
3805
    case 0x69: /* imul Gv, Ev, I */
3806
    case 0x6b:
3807
        ot = dflag + OT_WORD;
3808
        modrm = ldub_code(s->pc++);
3809
        reg = ((modrm >> 3) & 7) | rex_r;
3810
        if (b == 0x69)
3811
            s->rip_offset = insn_const_size(ot);
3812
        else if (b == 0x6b)
3813
            s->rip_offset = 1;
3814
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3815
        if (b == 0x69) {
3816
            val = insn_get(s, ot);
3817
            gen_op_movl_T1_im(val);
3818
        } else if (b == 0x6b) {
3819
            val = (int8_t)insn_get(s, OT_BYTE);
3820
            gen_op_movl_T1_im(val);
3821
        } else {
3822
            gen_op_mov_TN_reg[ot][1][reg]();
3823
        }
3824

    
3825
#ifdef TARGET_X86_64
3826
        if (ot == OT_QUAD) {
3827
            gen_op_imulq_T0_T1();
3828
        } else
3829
#endif
3830
        if (ot == OT_LONG) {
3831
            gen_op_imull_T0_T1();
3832
        } else {
3833
            gen_op_imulw_T0_T1();
3834
        }
3835
        gen_op_mov_reg_T0[ot][reg]();
3836
        s->cc_op = CC_OP_MULB + ot;
3837
        break;
3838
    case 0x1c0:
3839
    case 0x1c1: /* xadd Ev, Gv */
3840
        if ((b & 1) == 0)
3841
            ot = OT_BYTE;
3842
        else
3843
            ot = dflag + OT_WORD;
3844
        modrm = ldub_code(s->pc++);
3845
        reg = ((modrm >> 3) & 7) | rex_r;
3846
        mod = (modrm >> 6) & 3;
3847
        if (mod == 3) {
3848
            rm = (modrm & 7) | REX_B(s);
3849
            gen_op_mov_TN_reg[ot][0][reg]();
3850
            gen_op_mov_TN_reg[ot][1][rm]();
3851
            gen_op_addl_T0_T1();
3852
            gen_op_mov_reg_T1[ot][reg]();
3853
            gen_op_mov_reg_T0[ot][rm]();
3854
        } else {
3855
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3856
            gen_op_mov_TN_reg[ot][0][reg]();
3857
            gen_op_ld_T1_A0[ot + s->mem_index]();
3858
            gen_op_addl_T0_T1();
3859
            gen_op_st_T0_A0[ot + s->mem_index]();
3860
            gen_op_mov_reg_T1[ot][reg]();
3861
        }
3862
        gen_op_update2_cc();
3863
        s->cc_op = CC_OP_ADDB + ot;
3864
        break;
3865
    case 0x1b0:
3866
    case 0x1b1: /* cmpxchg Ev, Gv */
3867
        if ((b & 1) == 0)
3868
            ot = OT_BYTE;
3869
        else
3870
            ot = dflag + OT_WORD;
3871
        modrm = ldub_code(s->pc++);
3872
        reg = ((modrm >> 3) & 7) | rex_r;
3873
        mod = (modrm >> 6) & 3;
3874
        gen_op_mov_TN_reg[ot][1][reg]();
3875
        if (mod == 3) {
3876
            rm = (modrm & 7) | REX_B(s);
3877
            gen_op_mov_TN_reg[ot][0][rm]();
3878
            gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3879
            gen_op_mov_reg_T0[ot][rm]();
3880
        } else {
3881
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3882
            gen_op_ld_T0_A0[ot + s->mem_index]();
3883
            gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3884
        }
3885
        s->cc_op = CC_OP_SUBB + ot;
3886
        break;
3887
    case 0x1c7: /* cmpxchg8b */
3888
        modrm = ldub_code(s->pc++);
3889
        mod = (modrm >> 6) & 3;
3890
        if (mod == 3)
3891
            goto illegal_op;
3892
        gen_jmp_im(pc_start - s->cs_base);
3893
        if (s->cc_op != CC_OP_DYNAMIC)
3894
            gen_op_set_cc_op(s->cc_op);
3895
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3896
        gen_op_cmpxchg8b();
3897
        s->cc_op = CC_OP_EFLAGS;
3898
        break;
3899

    
3900
        /**************************/
3901
        /* push/pop */
3902
    case 0x50 ... 0x57: /* push */
3903
        gen_op_mov_TN_reg[OT_LONG][0][(b & 7) | REX_B(s)]();
3904
        gen_push_T0(s);
3905
        break;
3906
    case 0x58 ... 0x5f: /* pop */
3907
        if (CODE64(s)) {
3908
            ot = dflag ? OT_QUAD : OT_WORD;
3909
        } else {
3910
            ot = dflag + OT_WORD;
3911
        }
3912
        gen_pop_T0(s);
3913
        /* NOTE: order is important for pop %sp */
3914
        gen_pop_update(s);
3915
        gen_op_mov_reg_T0[ot][(b & 7) | REX_B(s)]();
3916
        break;
3917
    case 0x60: /* pusha */
3918
        if (CODE64(s))
3919
            goto illegal_op;
3920
        gen_pusha(s);
3921
        break;
3922
    case 0x61: /* popa */
3923
        if (CODE64(s))
3924
            goto illegal_op;
3925
        gen_popa(s);
3926
        break;
3927
    case 0x68: /* push Iv */
3928
    case 0x6a:
3929
        if (CODE64(s)) {
3930
            ot = dflag ? OT_QUAD : OT_WORD;
3931
        } else {
3932
            ot = dflag + OT_WORD;
3933
        }
3934
        if (b == 0x68)
3935
            val = insn_get(s, ot);
3936
        else
3937
            val = (int8_t)insn_get(s, OT_BYTE);
3938
        gen_op_movl_T0_im(val);
3939
        gen_push_T0(s);
3940
        break;
3941
    case 0x8f: /* pop Ev */
3942
        if (CODE64(s)) {
3943
            ot = dflag ? OT_QUAD : OT_WORD;
3944
        } else {
3945
            ot = dflag + OT_WORD;
3946
        }
3947
        modrm = ldub_code(s->pc++);
3948
        mod = (modrm >> 6) & 3;
3949
        gen_pop_T0(s);
3950
        if (mod == 3) {
3951
            /* NOTE: order is important for pop %sp */
3952
            gen_pop_update(s);
3953
            rm = (modrm & 7) | REX_B(s);
3954
            gen_op_mov_reg_T0[ot][rm]();
3955
        } else {
3956
            /* NOTE: order is important too for MMU exceptions */
3957
            s->popl_esp_hack = 1 << ot;
3958
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3959
            s->popl_esp_hack = 0;
3960
            gen_pop_update(s);
3961
        }
3962
        break;
3963
    case 0xc8: /* enter */
3964
        {
3965
            int level;
3966
            val = lduw_code(s->pc);
3967
            s->pc += 2;
3968
            level = ldub_code(s->pc++);
3969
            gen_enter(s, val, level);
3970
        }
3971
        break;
3972
    case 0xc9: /* leave */
3973
        /* XXX: exception not precise (ESP is updated before potential exception) */
3974
        if (CODE64(s)) {
3975
            gen_op_mov_TN_reg[OT_QUAD][0][R_EBP]();
3976
            gen_op_mov_reg_T0[OT_QUAD][R_ESP]();
3977
        } else if (s->ss32) {
3978
            gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
3979
            gen_op_mov_reg_T0[OT_LONG][R_ESP]();
3980
        } else {
3981
            gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
3982
            gen_op_mov_reg_T0[OT_WORD][R_ESP]();
3983
        }
3984
        gen_pop_T0(s);
3985
        if (CODE64(s)) {
3986
            ot = dflag ? OT_QUAD : OT_WORD;
3987
        } else {
3988
            ot = dflag + OT_WORD;
3989
        }
3990
        gen_op_mov_reg_T0[ot][R_EBP]();
3991
        gen_pop_update(s);
3992
        break;
3993
    case 0x06: /* push es */
3994
    case 0x0e: /* push cs */
3995
    case 0x16: /* push ss */
3996
    case 0x1e: /* push ds */
3997
        if (CODE64(s))
3998
            goto illegal_op;
3999
        gen_op_movl_T0_seg(b >> 3);
4000
        gen_push_T0(s);
4001
        break;
4002
    case 0x1a0: /* push fs */
4003
    case 0x1a8: /* push gs */
4004
        gen_op_movl_T0_seg((b >> 3) & 7);
4005
        gen_push_T0(s);
4006
        break;
4007
    case 0x07: /* pop es */
4008
    case 0x17: /* pop ss */
4009
    case 0x1f: /* pop ds */
4010
        if (CODE64(s))
4011
            goto illegal_op;
4012
        reg = b >> 3;
4013
        gen_pop_T0(s);
4014
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4015
        gen_pop_update(s);
4016
        if (reg == R_SS) {
4017
            /* if reg == SS, inhibit interrupts/trace. */
4018
            /* If several instructions disable interrupts, only the
4019
               _first_ does it */
4020
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4021
                gen_op_set_inhibit_irq();
4022
            s->tf = 0;
4023
        }
4024
        if (s->is_jmp) {
4025
            gen_jmp_im(s->pc - s->cs_base);
4026
            gen_eob(s);
4027
        }
4028
        break;
4029
    case 0x1a1: /* pop fs */
4030
    case 0x1a9: /* pop gs */
4031
        gen_pop_T0(s);
4032
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4033
        gen_pop_update(s);
4034
        if (s->is_jmp) {
4035
            gen_jmp_im(s->pc - s->cs_base);
4036
            gen_eob(s);
4037
        }
4038
        break;
4039

    
4040
        /**************************/
4041
        /* mov */
4042
    case 0x88:
4043
    case 0x89: /* mov Gv, Ev */
4044
        if ((b & 1) == 0)
4045
            ot = OT_BYTE;
4046
        else
4047
            ot = dflag + OT_WORD;
4048
        modrm = ldub_code(s->pc++);
4049
        reg = ((modrm >> 3) & 7) | rex_r;
4050

    
4051
        /* generate a generic store */
4052
        gen_ldst_modrm(s, modrm, ot, reg, 1);
4053
        break;
4054
    case 0xc6:
4055
    case 0xc7: /* mov Ev, Iv */
4056
        if ((b & 1) == 0)
4057
            ot = OT_BYTE;
4058
        else
4059
            ot = dflag + OT_WORD;
4060
        modrm = ldub_code(s->pc++);
4061
        mod = (modrm >> 6) & 3;
4062
        if (mod != 3) {
4063
            s->rip_offset = insn_const_size(ot);
4064
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4065
        }
4066
        val = insn_get(s, ot);
4067
        gen_op_movl_T0_im(val);
4068
        if (mod != 3)
4069
            gen_op_st_T0_A0[ot + s->mem_index]();
4070
        else
4071
            gen_op_mov_reg_T0[ot][(modrm & 7) | REX_B(s)]();
4072
        break;
4073
    case 0x8a:
4074
    case 0x8b: /* mov Ev, Gv */
4075
        if ((b & 1) == 0)
4076
            ot = OT_BYTE;
4077
        else
4078
            ot = OT_WORD + dflag;
4079
        modrm = ldub_code(s->pc++);
4080
        reg = ((modrm >> 3) & 7) | rex_r;
4081

    
4082
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4083
        gen_op_mov_reg_T0[ot][reg]();
4084
        break;
4085
    case 0x8e: /* mov seg, Gv */
4086
        modrm = ldub_code(s->pc++);
4087
        reg = (modrm >> 3) & 7;
4088
        if (reg >= 6 || reg == R_CS)
4089
            goto illegal_op;
4090
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4091
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4092
        if (reg == R_SS) {
4093
            /* if reg == SS, inhibit interrupts/trace */
4094
            /* If several instructions disable interrupts, only the
4095
               _first_ does it */
4096
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4097
                gen_op_set_inhibit_irq();
4098
            s->tf = 0;
4099
        }
4100
        if (s->is_jmp) {
4101
            gen_jmp_im(s->pc - s->cs_base);
4102
            gen_eob(s);
4103
        }
4104
        break;
4105
    case 0x8c: /* mov Gv, seg */
4106
        modrm = ldub_code(s->pc++);
4107
        reg = (modrm >> 3) & 7;
4108
        mod = (modrm >> 6) & 3;
4109
        if (reg >= 6)
4110
            goto illegal_op;
4111
        gen_op_movl_T0_seg(reg);
4112
        if (mod == 3)
4113
            ot = OT_WORD + dflag;
4114
        else
4115
            ot = OT_WORD;
4116
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4117
        break;
4118

    
4119
    case 0x1b6: /* movzbS Gv, Eb */
4120
    case 0x1b7: /* movzwS Gv, Eb */
4121
    case 0x1be: /* movsbS Gv, Eb */
4122
    case 0x1bf: /* movswS Gv, Eb */
4123
        {
4124
            int d_ot;
4125
            /* d_ot is the size of destination */
4126
            d_ot = dflag + OT_WORD;
4127
            /* ot is the size of source */
4128
            ot = (b & 1) + OT_BYTE;
4129
            modrm = ldub_code(s->pc++);
4130
            reg = ((modrm >> 3) & 7) | rex_r;
4131
            mod = (modrm >> 6) & 3;
4132
            rm = (modrm & 7) | REX_B(s);
4133

    
4134
            if (mod == 3) {
4135
                gen_op_mov_TN_reg[ot][0][rm]();
4136
                switch(ot | (b & 8)) {
4137
                case OT_BYTE:
4138
                    gen_op_movzbl_T0_T0();
4139
                    break;
4140
                case OT_BYTE | 8:
4141
                    gen_op_movsbl_T0_T0();
4142
                    break;
4143
                case OT_WORD:
4144
                    gen_op_movzwl_T0_T0();
4145
                    break;
4146
                default:
4147
                case OT_WORD | 8:
4148
                    gen_op_movswl_T0_T0();
4149
                    break;
4150
                }
4151
                gen_op_mov_reg_T0[d_ot][reg]();
4152
            } else {
4153
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4154
                if (b & 8) {
4155
                    gen_op_lds_T0_A0[ot + s->mem_index]();
4156
                } else {
4157
                    gen_op_ldu_T0_A0[ot + s->mem_index]();
4158
                }
4159
                gen_op_mov_reg_T0[d_ot][reg]();
4160
            }
4161
        }
4162
        break;
4163

    
4164
    case 0x8d: /* lea */
4165
        ot = dflag + OT_WORD;
4166
        modrm = ldub_code(s->pc++);
4167
        mod = (modrm >> 6) & 3;
4168
        if (mod == 3)
4169
            goto illegal_op;
4170
        reg = ((modrm >> 3) & 7) | rex_r;
4171
        /* we must ensure that no segment is added */
4172
        s->override = -1;
4173
        val = s->addseg;
4174
        s->addseg = 0;
4175
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4176
        s->addseg = val;
4177
        gen_op_mov_reg_A0[ot - OT_WORD][reg]();
4178
        break;
4179

    
4180
    case 0xa0: /* mov EAX, Ov */
4181
    case 0xa1:
4182
    case 0xa2: /* mov Ov, EAX */
4183
    case 0xa3:
4184
        {
4185
            target_ulong offset_addr;
4186

    
4187
            if ((b & 1) == 0)
4188
                ot = OT_BYTE;
4189
            else
4190
                ot = dflag + OT_WORD;
4191
#ifdef TARGET_X86_64
4192
            if (s->aflag == 2) {
4193
                offset_addr = ldq_code(s->pc);
4194
                s->pc += 8;
4195
                if (offset_addr == (int32_t)offset_addr)
4196
                    gen_op_movq_A0_im(offset_addr);
4197
                else
4198
                    gen_op_movq_A0_im64(offset_addr >> 32, offset_addr);
4199
            } else
4200
#endif
4201
            {
4202
                if (s->aflag) {
4203
                    offset_addr = insn_get(s, OT_LONG);
4204
                } else {
4205
                    offset_addr = insn_get(s, OT_WORD);
4206
                }
4207
                gen_op_movl_A0_im(offset_addr);
4208
            }
4209
            gen_add_A0_ds_seg(s);
4210
            if ((b & 2) == 0) {
4211
                gen_op_ld_T0_A0[ot + s->mem_index]();
4212
                gen_op_mov_reg_T0[ot][R_EAX]();
4213
            } else {
4214
                gen_op_mov_TN_reg[ot][0][R_EAX]();
4215
                gen_op_st_T0_A0[ot + s->mem_index]();
4216
            }
4217
        }
4218
        break;
4219
    case 0xd7: /* xlat */
4220
#ifdef TARGET_X86_64
4221
        if (s->aflag == 2) {
4222
            gen_op_movq_A0_reg[R_EBX]();
4223
            gen_op_addq_A0_AL();
4224
        } else
4225
#endif
4226
        {
4227
            gen_op_movl_A0_reg[R_EBX]();
4228
            gen_op_addl_A0_AL();
4229
            if (s->aflag == 0)
4230
                gen_op_andl_A0_ffff();
4231
        }
4232
        gen_add_A0_ds_seg(s);
4233
        gen_op_ldu_T0_A0[OT_BYTE + s->mem_index]();
4234
        gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
4235
        break;
4236
    case 0xb0 ... 0xb7: /* mov R, Ib */
4237
        val = insn_get(s, OT_BYTE);
4238
        gen_op_movl_T0_im(val);
4239
        gen_op_mov_reg_T0[OT_BYTE][(b & 7) | REX_B(s)]();
4240
        break;
4241
    case 0xb8 ... 0xbf: /* mov R, Iv */
4242
#ifdef TARGET_X86_64
4243
        if (dflag == 2) {
4244
            uint64_t tmp;
4245
            /* 64 bit case */
4246
            tmp = ldq_code(s->pc);
4247
            s->pc += 8;
4248
            reg = (b & 7) | REX_B(s);
4249
            gen_movtl_T0_im(tmp);
4250
            gen_op_mov_reg_T0[OT_QUAD][reg]();
4251
        } else
4252
#endif
4253
        {
4254
            ot = dflag ? OT_LONG : OT_WORD;
4255
            val = insn_get(s, ot);
4256
            reg = (b & 7) | REX_B(s);
4257
            gen_op_movl_T0_im(val);
4258
            gen_op_mov_reg_T0[ot][reg]();
4259
        }
4260
        break;
4261

    
4262
    case 0x91 ... 0x97: /* xchg R, EAX */
4263
        ot = dflag + OT_WORD;
4264
        reg = (b & 7) | REX_B(s);
4265
        rm = R_EAX;
4266
        goto do_xchg_reg;
4267
    case 0x86:
4268
    case 0x87: /* xchg Ev, Gv */
4269
        if ((b & 1) == 0)
4270
            ot = OT_BYTE;
4271
        else
4272
            ot = dflag + OT_WORD;
4273
        modrm = ldub_code(s->pc++);
4274
        reg = ((modrm >> 3) & 7) | rex_r;
4275
        mod = (modrm >> 6) & 3;
4276
        if (mod == 3) {
4277
            rm = (modrm & 7) | REX_B(s);
4278
        do_xchg_reg:
4279
            gen_op_mov_TN_reg[ot][0][reg]();
4280
            gen_op_mov_TN_reg[ot][1][rm]();
4281
            gen_op_mov_reg_T0[ot][rm]();
4282
            gen_op_mov_reg_T1[ot][reg]();
4283
        } else {
4284
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4285
            gen_op_mov_TN_reg[ot][0][reg]();
4286
            /* for xchg, lock is implicit */
4287
            if (!(prefixes & PREFIX_LOCK))
4288
                gen_op_lock();
4289
            gen_op_ld_T1_A0[ot + s->mem_index]();
4290
            gen_op_st_T0_A0[ot + s->mem_index]();
4291
            if (!(prefixes & PREFIX_LOCK))
4292
                gen_op_unlock();
4293
            gen_op_mov_reg_T1[ot][reg]();
4294
        }
4295
        break;
4296
    case 0xc4: /* les Gv */
4297
        if (CODE64(s))
4298
            goto illegal_op;
4299
        op = R_ES;
4300
        goto do_lxx;
4301
    case 0xc5: /* lds Gv */
4302
        if (CODE64(s))
4303
            goto illegal_op;
4304
        op = R_DS;
4305
        goto do_lxx;
4306
    case 0x1b2: /* lss Gv */
4307
        op = R_SS;
4308
        goto do_lxx;
4309
    case 0x1b4: /* lfs Gv */
4310
        op = R_FS;
4311
        goto do_lxx;
4312
    case 0x1b5: /* lgs Gv */
4313
        op = R_GS;
4314
    do_lxx:
4315
        ot = dflag ? OT_LONG : OT_WORD;
4316
        modrm = ldub_code(s->pc++);
4317
        reg = ((modrm >> 3) & 7) | rex_r;
4318
        mod = (modrm >> 6) & 3;
4319
        if (mod == 3)
4320
            goto illegal_op;
4321
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4322
        gen_op_ld_T1_A0[ot + s->mem_index]();
4323
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4324
        /* load the segment first to handle exceptions properly */
4325
        gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
4326
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4327
        /* then put the data */
4328
        gen_op_mov_reg_T1[ot][reg]();
4329
        if (s->is_jmp) {
4330
            gen_jmp_im(s->pc - s->cs_base);
4331
            gen_eob(s);
4332
        }
4333
        break;
4334

    
4335
        /************************/
4336
        /* shifts */
4337
    case 0xc0:
4338
    case 0xc1:
4339
        /* shift Ev,Ib */
4340
        shift = 2;
4341
    grp2:
4342
        {
4343
            if ((b & 1) == 0)
4344
                ot = OT_BYTE;
4345
            else
4346
                ot = dflag + OT_WORD;
4347

    
4348
            modrm = ldub_code(s->pc++);
4349
            mod = (modrm >> 6) & 3;
4350
            op = (modrm >> 3) & 7;
4351

    
4352
            if (mod != 3) {
4353
                if (shift == 2) {
4354
                    s->rip_offset = 1;
4355
                }
4356
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4357
                opreg = OR_TMP0;
4358
            } else {
4359
                opreg = (modrm & 7) | REX_B(s);
4360
            }
4361

    
4362
            /* simpler op */
4363
            if (shift == 0) {
4364
                gen_shift(s, op, ot, opreg, OR_ECX);
4365
            } else {
4366
                if (shift == 2) {
4367
                    shift = ldub_code(s->pc++);
4368
                }
4369
                gen_shifti(s, op, ot, opreg, shift);
4370
            }
4371
        }
4372
        break;
4373
    case 0xd0:
4374
    case 0xd1:
4375
        /* shift Ev,1 */
4376
        shift = 1;
4377
        goto grp2;
4378
    case 0xd2:
4379
    case 0xd3:
4380
        /* shift Ev,cl */
4381
        shift = 0;
4382
        goto grp2;
4383

    
4384
    case 0x1a4: /* shld imm */
4385
        op = 0;
4386
        shift = 1;
4387
        goto do_shiftd;
4388
    case 0x1a5: /* shld cl */
4389
        op = 0;
4390
        shift = 0;
4391
        goto do_shiftd;
4392
    case 0x1ac: /* shrd imm */
4393
        op = 1;
4394
        shift = 1;
4395
        goto do_shiftd;
4396
    case 0x1ad: /* shrd cl */
4397
        op = 1;
4398
        shift = 0;
4399
    do_shiftd:
4400
        ot = dflag + OT_WORD;
4401
        modrm = ldub_code(s->pc++);
4402
        mod = (modrm >> 6) & 3;
4403
        rm = (modrm & 7) | REX_B(s);
4404
        reg = ((modrm >> 3) & 7) | rex_r;
4405

    
4406
        if (mod != 3) {
4407
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4408
            gen_op_ld_T0_A0[ot + s->mem_index]();
4409
        } else {
4410
            gen_op_mov_TN_reg[ot][0][rm]();
4411
        }
4412
        gen_op_mov_TN_reg[ot][1][reg]();
4413

    
4414
        if (shift) {
4415
            val = ldub_code(s->pc++);
4416
            if (ot == OT_QUAD)
4417
                val &= 0x3f;
4418
            else
4419
                val &= 0x1f;
4420
            if (val) {
4421
                if (mod == 3)
4422
                    gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4423
                else
4424
                    gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4425
                if (op == 0 && ot != OT_WORD)
4426
                    s->cc_op = CC_OP_SHLB + ot;
4427
                else
4428
                    s->cc_op = CC_OP_SARB + ot;
4429
            }
4430
        } else {
4431
            if (s->cc_op != CC_OP_DYNAMIC)
4432
                gen_op_set_cc_op(s->cc_op);
4433
            if (mod == 3)
4434
                gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4435
            else
4436
                gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4437
            s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4438
        }
4439
        if (mod == 3) {
4440
            gen_op_mov_reg_T0[ot][rm]();
4441
        }
4442
        break;
4443

    
4444
        /************************/
4445
        /* floats */
4446
    case 0xd8 ... 0xdf:
4447
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4448
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4449
            /* XXX: what to do if illegal op ? */
4450
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4451
            break;
4452
        }
4453
        modrm = ldub_code(s->pc++);
4454
        mod = (modrm >> 6) & 3;
4455
        rm = modrm & 7;
4456
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4457
        if (mod != 3) {
4458
            /* memory op */
4459
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4460
            switch(op) {
4461
            case 0x00 ... 0x07: /* fxxxs */
4462
            case 0x10 ... 0x17: /* fixxxl */
4463
            case 0x20 ... 0x27: /* fxxxl */
4464
            case 0x30 ... 0x37: /* fixxx */
4465
                {
4466
                    int op1;
4467
                    op1 = op & 7;
4468

    
4469
                    switch(op >> 4) {
4470
                    case 0:
4471
                        gen_op_flds_FT0_A0();
4472
                        break;
4473
                    case 1:
4474
                        gen_op_fildl_FT0_A0();
4475
                        break;
4476
                    case 2:
4477
                        gen_op_fldl_FT0_A0();
4478
                        break;
4479
                    case 3:
4480
                    default:
4481
                        gen_op_fild_FT0_A0();
4482
                        break;
4483
                    }
4484

    
4485
                    gen_op_fp_arith_ST0_FT0[op1]();
4486
                    if (op1 == 3) {
4487
                        /* fcomp needs pop */
4488
                        gen_op_fpop();
4489
                    }
4490
                }
4491
                break;
4492
            case 0x08: /* flds */
4493
            case 0x0a: /* fsts */
4494
            case 0x0b: /* fstps */
4495
            case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4496
            case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4497
            case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4498
                switch(op & 7) {
4499
                case 0:
4500
                    switch(op >> 4) {
4501
                    case 0:
4502
                        gen_op_flds_ST0_A0();
4503
                        break;
4504
                    case 1:
4505
                        gen_op_fildl_ST0_A0();
4506
                        break;
4507
                    case 2:
4508
                        gen_op_fldl_ST0_A0();
4509
                        break;
4510
                    case 3:
4511
                    default:
4512
                        gen_op_fild_ST0_A0();
4513
                        break;
4514
                    }
4515
                    break;
4516
                case 1:
4517
                    switch(op >> 4) {
4518
                    case 1:
4519
                        gen_op_fisttl_ST0_A0();
4520
                        break;
4521
                    case 2:
4522
                        gen_op_fisttll_ST0_A0();
4523
                        break;
4524
                    case 3:
4525
                    default:
4526
                        gen_op_fistt_ST0_A0();
4527
                    }
4528
                    gen_op_fpop();
4529
                    break;
4530
                default:
4531
                    switch(op >> 4) {
4532
                    case 0:
4533
                        gen_op_fsts_ST0_A0();
4534
                        break;
4535
                    case 1:
4536
                        gen_op_fistl_ST0_A0();
4537
                        break;
4538
                    case 2:
4539
                        gen_op_fstl_ST0_A0();
4540
                        break;
4541
                    case 3:
4542
                    default:
4543
                        gen_op_fist_ST0_A0();
4544
                        break;
4545
                    }
4546
                    if ((op & 7) == 3)
4547
                        gen_op_fpop();
4548
                    break;
4549
                }
4550
                break;
4551
            case 0x0c: /* fldenv mem */
4552
                gen_op_fldenv_A0(s->dflag);
4553
                break;
4554
            case 0x0d: /* fldcw mem */
4555
                gen_op_fldcw_A0();
4556
                break;
4557
            case 0x0e: /* fnstenv mem */
4558
                gen_op_fnstenv_A0(s->dflag);
4559
                break;
4560
            case 0x0f: /* fnstcw mem */
4561
                gen_op_fnstcw_A0();
4562
                break;
4563
            case 0x1d: /* fldt mem */
4564
                gen_op_fldt_ST0_A0();
4565
                break;
4566
            case 0x1f: /* fstpt mem */
4567
                gen_op_fstt_ST0_A0();
4568
                gen_op_fpop();
4569
                break;
4570
            case 0x2c: /* frstor mem */
4571
                gen_op_frstor_A0(s->dflag);
4572
                break;
4573
            case 0x2e: /* fnsave mem */
4574
                gen_op_fnsave_A0(s->dflag);
4575
                break;
4576
            case 0x2f: /* fnstsw mem */
4577
                gen_op_fnstsw_A0();
4578
                break;
4579
            case 0x3c: /* fbld */
4580
                gen_op_fbld_ST0_A0();
4581
                break;
4582
            case 0x3e: /* fbstp */
4583
                gen_op_fbst_ST0_A0();
4584
                gen_op_fpop();
4585
                break;
4586
            case 0x3d: /* fildll */
4587
                gen_op_fildll_ST0_A0();
4588
                break;
4589
            case 0x3f: /* fistpll */
4590
                gen_op_fistll_ST0_A0();
4591
                gen_op_fpop();
4592
                break;
4593
            default:
4594
                goto illegal_op;
4595
            }
4596
        } else {
4597
            /* register float ops */
4598
            opreg = rm;
4599

    
4600
            switch(op) {
4601
            case 0x08: /* fld sti */
4602
                gen_op_fpush();
4603
                gen_op_fmov_ST0_STN((opreg + 1) & 7);
4604
                break;
4605
            case 0x09: /* fxchg sti */
4606
            case 0x29: /* fxchg4 sti, undocumented op */
4607
            case 0x39: /* fxchg7 sti, undocumented op */
4608
                gen_op_fxchg_ST0_STN(opreg);
4609
                break;
4610
            case 0x0a: /* grp d9/2 */
4611
                switch(rm) {
4612
                case 0: /* fnop */
4613
                    /* check exceptions (FreeBSD FPU probe) */
4614
                    if (s->cc_op != CC_OP_DYNAMIC)
4615
                        gen_op_set_cc_op(s->cc_op);
4616
                    gen_jmp_im(pc_start - s->cs_base);
4617
                    gen_op_fwait();
4618
                    break;
4619
                default:
4620
                    goto illegal_op;
4621
                }
4622
                break;
4623
            case 0x0c: /* grp d9/4 */
4624
                switch(rm) {
4625
                case 0: /* fchs */
4626
                    gen_op_fchs_ST0();
4627
                    break;
4628
                case 1: /* fabs */
4629
                    gen_op_fabs_ST0();
4630
                    break;
4631
                case 4: /* ftst */
4632
                    gen_op_fldz_FT0();
4633
                    gen_op_fcom_ST0_FT0();
4634
                    break;
4635
                case 5: /* fxam */
4636
                    gen_op_fxam_ST0();
4637
                    break;
4638
                default:
4639
                    goto illegal_op;
4640
                }
4641
                break;
4642
            case 0x0d: /* grp d9/5 */
4643
                {
4644
                    switch(rm) {
4645
                    case 0:
4646
                        gen_op_fpush();
4647
                        gen_op_fld1_ST0();
4648
                        break;
4649
                    case 1:
4650
                        gen_op_fpush();
4651
                        gen_op_fldl2t_ST0();
4652
                        break;
4653
                    case 2:
4654
                        gen_op_fpush();
4655
                        gen_op_fldl2e_ST0();
4656
                        break;
4657
                    case 3:
4658
                        gen_op_fpush();
4659
                        gen_op_fldpi_ST0();
4660
                        break;
4661
                    case 4:
4662
                        gen_op_fpush();
4663
                        gen_op_fldlg2_ST0();
4664
                        break;
4665
                    case 5:
4666
                        gen_op_fpush();
4667
                        gen_op_fldln2_ST0();
4668
                        break;
4669
                    case 6:
4670
                        gen_op_fpush();
4671
                        gen_op_fldz_ST0();
4672
                        break;
4673
                    default:
4674
                        goto illegal_op;
4675
                    }
4676
                }
4677
                break;
4678
            case 0x0e: /* grp d9/6 */
4679
                switch(rm) {
4680
                case 0: /* f2xm1 */
4681
                    gen_op_f2xm1();
4682
                    break;
4683
                case 1: /* fyl2x */
4684
                    gen_op_fyl2x();
4685
                    break;
4686
                case 2: /* fptan */
4687
                    gen_op_fptan();
4688
                    break;
4689
                case 3: /* fpatan */
4690
                    gen_op_fpatan();
4691
                    break;
4692
                case 4: /* fxtract */
4693
                    gen_op_fxtract();
4694
                    break;
4695
                case 5: /* fprem1 */
4696
                    gen_op_fprem1();
4697
                    break;
4698
                case 6: /* fdecstp */
4699
                    gen_op_fdecstp();
4700
                    break;
4701
                default:
4702
                case 7: /* fincstp */
4703
                    gen_op_fincstp();
4704
                    break;
4705
                }
4706
                break;
4707
            case 0x0f: /* grp d9/7 */
4708
                switch(rm) {
4709
                case 0: /* fprem */
4710
                    gen_op_fprem();
4711
                    break;
4712
                case 1: /* fyl2xp1 */
4713
                    gen_op_fyl2xp1();
4714
                    break;
4715
                case 2: /* fsqrt */
4716
                    gen_op_fsqrt();
4717
                    break;
4718
                case 3: /* fsincos */
4719
                    gen_op_fsincos();
4720
                    break;
4721
                case 5: /* fscale */
4722
                    gen_op_fscale();
4723
                    break;
4724
                case 4: /* frndint */
4725
                    gen_op_frndint();
4726
                    break;
4727
                case 6: /* fsin */
4728
                    gen_op_fsin();
4729
                    break;
4730
                default:
4731
                case 7: /* fcos */
4732
                    gen_op_fcos();
4733
                    break;
4734
                }
4735
                break;
4736
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4737
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4738
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4739
                {
4740
                    int op1;
4741

    
4742
                    op1 = op & 7;
4743
                    if (op >= 0x20) {
4744
                        gen_op_fp_arith_STN_ST0[op1](opreg);
4745
                        if (op >= 0x30)
4746
                            gen_op_fpop();
4747
                    } else {
4748
                        gen_op_fmov_FT0_STN(opreg);
4749
                        gen_op_fp_arith_ST0_FT0[op1]();
4750
                    }
4751
                }
4752
                break;
4753
            case 0x02: /* fcom */
4754
            case 0x22: /* fcom2, undocumented op */
4755
                gen_op_fmov_FT0_STN(opreg);
4756
                gen_op_fcom_ST0_FT0();
4757
                break;
4758
            case 0x03: /* fcomp */
4759
            case 0x23: /* fcomp3, undocumented op */
4760
            case 0x32: /* fcomp5, undocumented op */
4761
                gen_op_fmov_FT0_STN(opreg);
4762
                gen_op_fcom_ST0_FT0();
4763
                gen_op_fpop();
4764
                break;
4765
            case 0x15: /* da/5 */
4766
                switch(rm) {
4767
                case 1: /* fucompp */
4768
                    gen_op_fmov_FT0_STN(1);
4769
                    gen_op_fucom_ST0_FT0();
4770
                    gen_op_fpop();
4771
                    gen_op_fpop();
4772
                    break;
4773
                default:
4774
                    goto illegal_op;
4775
                }
4776
                break;
4777
            case 0x1c:
4778
                switch(rm) {
4779
                case 0: /* feni (287 only, just do nop here) */
4780
                    break;
4781
                case 1: /* fdisi (287 only, just do nop here) */
4782
                    break;
4783
                case 2: /* fclex */
4784
                    gen_op_fclex();
4785
                    break;
4786
                case 3: /* fninit */
4787
                    gen_op_fninit();
4788
                    break;
4789
                case 4: /* fsetpm (287 only, just do nop here) */
4790
                    break;
4791
                default:
4792
                    goto illegal_op;
4793
                }
4794
                break;
4795
            case 0x1d: /* fucomi */
4796
                if (s->cc_op != CC_OP_DYNAMIC)
4797
                    gen_op_set_cc_op(s->cc_op);
4798
                gen_op_fmov_FT0_STN(opreg);
4799
                gen_op_fucomi_ST0_FT0();
4800
                s->cc_op = CC_OP_EFLAGS;
4801
                break;
4802
            case 0x1e: /* fcomi */
4803
                if (s->cc_op != CC_OP_DYNAMIC)
4804
                    gen_op_set_cc_op(s->cc_op);
4805
                gen_op_fmov_FT0_STN(opreg);
4806
                gen_op_fcomi_ST0_FT0();
4807
                s->cc_op = CC_OP_EFLAGS;
4808
                break;
4809
            case 0x28: /* ffree sti */
4810
                gen_op_ffree_STN(opreg);
4811
                break;
4812
            case 0x2a: /* fst sti */
4813
                gen_op_fmov_STN_ST0(opreg);
4814
                break;
4815
            case 0x2b: /* fstp sti */
4816
            case 0x0b: /* fstp1 sti, undocumented op */
4817
            case 0x3a: /* fstp8 sti, undocumented op */
4818
            case 0x3b: /* fstp9 sti, undocumented op */
4819
                gen_op_fmov_STN_ST0(opreg);
4820
                gen_op_fpop();
4821
                break;
4822
            case 0x2c: /* fucom st(i) */
4823
                gen_op_fmov_FT0_STN(opreg);
4824
                gen_op_fucom_ST0_FT0();
4825
                break;
4826
            case 0x2d: /* fucomp st(i) */
4827
                gen_op_fmov_FT0_STN(opreg);
4828
                gen_op_fucom_ST0_FT0();
4829
                gen_op_fpop();
4830
                break;
4831
            case 0x33: /* de/3 */
4832
                switch(rm) {
4833
                case 1: /* fcompp */
4834
                    gen_op_fmov_FT0_STN(1);
4835
                    gen_op_fcom_ST0_FT0();
4836
                    gen_op_fpop();
4837
                    gen_op_fpop();
4838
                    break;
4839
                default:
4840
                    goto illegal_op;
4841
                }
4842
                break;
4843
            case 0x38: /* ffreep sti, undocumented op */
4844
                gen_op_ffree_STN(opreg);
4845
                gen_op_fpop();
4846
                break;
4847
            case 0x3c: /* df/4 */
4848
                switch(rm) {
4849
                case 0:
4850
                    gen_op_fnstsw_EAX();
4851
                    break;
4852
                default:
4853
                    goto illegal_op;
4854
                }
4855
                break;
4856
            case 0x3d: /* fucomip */
4857
                if (s->cc_op != CC_OP_DYNAMIC)
4858
                    gen_op_set_cc_op(s->cc_op);
4859
                gen_op_fmov_FT0_STN(opreg);
4860
                gen_op_fucomi_ST0_FT0();
4861
                gen_op_fpop();
4862
                s->cc_op = CC_OP_EFLAGS;
4863
                break;
4864
            case 0x3e: /* fcomip */
4865
                if (s->cc_op != CC_OP_DYNAMIC)
4866
                    gen_op_set_cc_op(s->cc_op);
4867
                gen_op_fmov_FT0_STN(opreg);
4868
                gen_op_fcomi_ST0_FT0();
4869
                gen_op_fpop();
4870
                s->cc_op = CC_OP_EFLAGS;
4871
                break;
4872
            case 0x10 ... 0x13: /* fcmovxx */
4873
            case 0x18 ... 0x1b:
4874
                {
4875
                    int op1;
4876
                    const static uint8_t fcmov_cc[8] = {
4877
                        (JCC_B << 1),
4878
                        (JCC_Z << 1),
4879
                        (JCC_BE << 1),
4880
                        (JCC_P << 1),
4881
                    };
4882
                    op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
4883
                    gen_setcc(s, op1);
4884
                    gen_op_fcmov_ST0_STN_T0(opreg);
4885
                }
4886
                break;
4887
            default:
4888
                goto illegal_op;
4889
            }
4890
        }
4891
#ifdef USE_CODE_COPY
4892
        s->tb->cflags |= CF_TB_FP_USED;
4893
#endif
4894
        break;
4895
        /************************/
4896
        /* string ops */
4897

    
4898
    case 0xa4: /* movsS */
4899
    case 0xa5:
4900
        if ((b & 1) == 0)
4901
            ot = OT_BYTE;
4902
        else
4903
            ot = dflag + OT_WORD;
4904

    
4905
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4906
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4907
        } else {
4908
            gen_movs(s, ot);
4909
        }
4910
        break;
4911

    
4912
    case 0xaa: /* stosS */
4913
    case 0xab:
4914
        if ((b & 1) == 0)
4915
            ot = OT_BYTE;
4916
        else
4917
            ot = dflag + OT_WORD;
4918

    
4919
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4920
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4921
        } else {
4922
            gen_stos(s, ot);
4923
        }
4924
        break;
4925
    case 0xac: /* lodsS */
4926
    case 0xad:
4927
        if ((b & 1) == 0)
4928
            ot = OT_BYTE;
4929
        else
4930
            ot = dflag + OT_WORD;
4931
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4932
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4933
        } else {
4934
            gen_lods(s, ot);
4935
        }
4936
        break;
4937
    case 0xae: /* scasS */
4938
    case 0xaf:
4939
        if ((b & 1) == 0)
4940
            ot = OT_BYTE;
4941
        else
4942
            ot = dflag + OT_WORD;
4943
        if (prefixes & PREFIX_REPNZ) {
4944
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4945
        } else if (prefixes & PREFIX_REPZ) {
4946
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4947
        } else {
4948
            gen_scas(s, ot);
4949
            s->cc_op = CC_OP_SUBB + ot;
4950
        }
4951
        break;
4952

    
4953
    case 0xa6: /* cmpsS */
4954
    case 0xa7:
4955
        if ((b & 1) == 0)
4956
            ot = OT_BYTE;
4957
        else
4958
            ot = dflag + OT_WORD;
4959
        if (prefixes & PREFIX_REPNZ) {
4960
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4961
        } else if (prefixes & PREFIX_REPZ) {
4962
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4963
        } else {
4964
            gen_cmps(s, ot);
4965
            s->cc_op = CC_OP_SUBB + ot;
4966
        }
4967
        break;
4968
    case 0x6c: /* insS */
4969
    case 0x6d:
4970
        if ((b & 1) == 0)
4971
            ot = OT_BYTE;
4972
        else
4973
            ot = dflag ? OT_LONG : OT_WORD;
4974
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
4975
        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4976
        gen_op_andl_T0_ffff();
4977
        if (gen_svm_check_io(s, pc_start,
4978
                             SVM_IOIO_TYPE_MASK | (1 << (4+ot)) |
4979
                             svm_is_rep(prefixes) | 4 | (1 << (7+s->aflag))))
4980
            break;
4981
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4982
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4983
        } else {
4984
            gen_ins(s, ot);
4985
        }
4986
        break;
4987
    case 0x6e: /* outsS */
4988
    case 0x6f:
4989
        if ((b & 1) == 0)
4990
            ot = OT_BYTE;
4991
        else
4992
            ot = dflag ? OT_LONG : OT_WORD;
4993
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
4994
        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4995
        gen_op_andl_T0_ffff();
4996
        if (gen_svm_check_io(s, pc_start,
4997
                             (1 << (4+ot)) | svm_is_rep(prefixes) |
4998
                             4 | (1 << (7+s->aflag))))
4999
            break;
5000
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5001
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5002
        } else {
5003
            gen_outs(s, ot);
5004
        }
5005
        break;
5006

    
5007
        /************************/
5008
        /* port I/O */
5009

    
5010
    case 0xe4:
5011
    case 0xe5:
5012
        if ((b & 1) == 0)
5013
            ot = OT_BYTE;
5014
        else
5015
            ot = dflag ? OT_LONG : OT_WORD;
5016
        val = ldub_code(s->pc++);
5017
        gen_op_movl_T0_im(val);
5018
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
5019
        if (gen_svm_check_io(s, pc_start,
5020
                             SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
5021
                             (1 << (4+ot))))
5022
            break;
5023
        gen_op_in[ot]();
5024
        gen_op_mov_reg_T1[ot][R_EAX]();
5025
        break;
5026
    case 0xe6:
5027
    case 0xe7:
5028
        if ((b & 1) == 0)
5029
            ot = OT_BYTE;
5030
        else
5031
            ot = dflag ? OT_LONG : OT_WORD;
5032
        val = ldub_code(s->pc++);
5033
        gen_op_movl_T0_im(val);
5034
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
5035
        if (gen_svm_check_io(s, pc_start, svm_is_rep(prefixes) |
5036
                             (1 << (4+ot))))
5037
            break;
5038
        gen_op_mov_TN_reg[ot][1][R_EAX]();
5039
        gen_op_out[ot]();
5040
        break;
5041
    case 0xec:
5042
    case 0xed:
5043
        if ((b & 1) == 0)
5044
            ot = OT_BYTE;
5045
        else
5046
            ot = dflag ? OT_LONG : OT_WORD;
5047
        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
5048
        gen_op_andl_T0_ffff();
5049
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
5050
        if (gen_svm_check_io(s, pc_start,
5051
                             SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
5052
                             (1 << (4+ot))))
5053
            break;
5054
        gen_op_in[ot]();
5055
        gen_op_mov_reg_T1[ot][R_EAX]();
5056
        break;
5057
    case 0xee:
5058
    case 0xef:
5059
        if ((b & 1) == 0)
5060
            ot = OT_BYTE;
5061
        else
5062
            ot = dflag ? OT_LONG : OT_WORD;
5063
        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
5064
        gen_op_andl_T0_ffff();
5065
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
5066
        if (gen_svm_check_io(s, pc_start,
5067
                             svm_is_rep(prefixes) | (1 << (4+ot))))
5068
            break;
5069
        gen_op_mov_TN_reg[ot][1][R_EAX]();
5070
        gen_op_out[ot]();
5071
        break;
5072

    
5073
        /************************/
5074
        /* control */
5075
    case 0xc2: /* ret im */
5076
        val = ldsw_code(s->pc);
5077
        s->pc += 2;
5078
        gen_pop_T0(s);
5079
        if (CODE64(s) && s->dflag)
5080
            s->dflag = 2;
5081
        gen_stack_update(s, val + (2 << s->dflag));
5082
        if (s->dflag == 0)
5083
            gen_op_andl_T0_ffff();
5084
        gen_op_jmp_T0();
5085
        gen_eob(s);
5086
        break;
5087
    case 0xc3: /* ret */
5088
        gen_pop_T0(s);
5089
        gen_pop_update(s);
5090
        if (s->dflag == 0)
5091
            gen_op_andl_T0_ffff();
5092
        gen_op_jmp_T0();
5093
        gen_eob(s);
5094
        break;
5095
    case 0xca: /* lret im */
5096
        val = ldsw_code(s->pc);
5097
        s->pc += 2;
5098
    do_lret:
5099
        if (s->pe && !s->vm86) {
5100
            if (s->cc_op != CC_OP_DYNAMIC)
5101
                gen_op_set_cc_op(s->cc_op);
5102
            gen_jmp_im(pc_start - s->cs_base);
5103
            gen_op_lret_protected(s->dflag, val);
5104
        } else {
5105
            gen_stack_A0(s);
5106
            /* pop offset */
5107
            gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
5108
            if (s->dflag == 0)
5109
                gen_op_andl_T0_ffff();
5110
            /* NOTE: keeping EIP updated is not a problem in case of
5111
               exception */
5112
            gen_op_jmp_T0();
5113
            /* pop selector */
5114
            gen_op_addl_A0_im(2 << s->dflag);
5115
            gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
5116
            gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5117
            /* add stack offset */
5118
            gen_stack_update(s, val + (4 << s->dflag));
5119
        }
5120
        gen_eob(s);
5121
        break;
5122
    case 0xcb: /* lret */
5123
        val = 0;
5124
        goto do_lret;
5125
    case 0xcf: /* iret */
5126
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5127
            break;
5128
        if (!s->pe) {
5129
            /* real mode */
5130
            gen_op_iret_real(s->dflag);
5131
            s->cc_op = CC_OP_EFLAGS;
5132
        } else if (s->vm86) {
5133
            if (s->iopl != 3) {
5134
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5135
            } else {
5136
                gen_op_iret_real(s->dflag);
5137
                s->cc_op = CC_OP_EFLAGS;
5138
            }
5139
        } else {
5140
            if (s->cc_op != CC_OP_DYNAMIC)
5141
                gen_op_set_cc_op(s->cc_op);
5142
            gen_jmp_im(pc_start - s->cs_base);
5143
            gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
5144
            s->cc_op = CC_OP_EFLAGS;
5145
        }
5146
        gen_eob(s);
5147
        break;
5148
    case 0xe8: /* call im */
5149
        {
5150
            if (dflag)
5151
                tval = (int32_t)insn_get(s, OT_LONG);
5152
            else
5153
                tval = (int16_t)insn_get(s, OT_WORD);
5154
            next_eip = s->pc - s->cs_base;
5155
            tval += next_eip;
5156
            if (s->dflag == 0)
5157
                tval &= 0xffff;
5158
            gen_movtl_T0_im(next_eip);
5159
            gen_push_T0(s);
5160
            gen_jmp(s, tval);
5161
        }
5162
        break;
5163
    case 0x9a: /* lcall im */
5164
        {
5165
            unsigned int selector, offset;
5166

    
5167
            if (CODE64(s))
5168
                goto illegal_op;
5169
            ot = dflag ? OT_LONG : OT_WORD;
5170
            offset = insn_get(s, ot);
5171
            selector = insn_get(s, OT_WORD);
5172

    
5173
            gen_op_movl_T0_im(selector);
5174
            gen_op_movl_T1_imu(offset);
5175
        }
5176
        goto do_lcall;
5177
    case 0xe9: /* jmp im */
5178
        if (dflag)
5179
            tval = (int32_t)insn_get(s, OT_LONG);
5180
        else
5181
            tval = (int16_t)insn_get(s, OT_WORD);
5182
        tval += s->pc - s->cs_base;
5183
        if (s->dflag == 0)
5184
            tval &= 0xffff;
5185
        gen_jmp(s, tval);
5186
        break;
5187
    case 0xea: /* ljmp im */
5188
        {
5189
            unsigned int selector, offset;
5190

    
5191
            if (CODE64(s))
5192
                goto illegal_op;
5193
            ot = dflag ? OT_LONG : OT_WORD;
5194
            offset = insn_get(s, ot);
5195
            selector = insn_get(s, OT_WORD);
5196

    
5197
            gen_op_movl_T0_im(selector);
5198
            gen_op_movl_T1_imu(offset);
5199
        }
5200
        goto do_ljmp;
5201
    case 0xeb: /* jmp Jb */
5202
        tval = (int8_t)insn_get(s, OT_BYTE);
5203
        tval += s->pc - s->cs_base;
5204
        if (s->dflag == 0)
5205
            tval &= 0xffff;
5206
        gen_jmp(s, tval);
5207
        break;
5208
    case 0x70 ... 0x7f: /* jcc Jb */
5209
        tval = (int8_t)insn_get(s, OT_BYTE);
5210
        goto do_jcc;
5211
    case 0x180 ... 0x18f: /* jcc Jv */
5212
        if (dflag) {
5213
            tval = (int32_t)insn_get(s, OT_LONG);
5214
        } else {
5215
            tval = (int16_t)insn_get(s, OT_WORD);
5216
        }
5217
    do_jcc:
5218
        next_eip = s->pc - s->cs_base;
5219
        tval += next_eip;
5220
        if (s->dflag == 0)
5221
            tval &= 0xffff;
5222
        gen_jcc(s, b, tval, next_eip);
5223
        break;
5224

    
5225
    case 0x190 ... 0x19f: /* setcc Gv */
5226
        modrm = ldub_code(s->pc++);
5227
        gen_setcc(s, b);
5228
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5229
        break;
5230
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
5231
        ot = dflag + OT_WORD;
5232
        modrm = ldub_code(s->pc++);
5233
        reg = ((modrm >> 3) & 7) | rex_r;
5234
        mod = (modrm >> 6) & 3;
5235
        gen_setcc(s, b);
5236
        if (mod != 3) {
5237
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5238
            gen_op_ld_T1_A0[ot + s->mem_index]();
5239
        } else {
5240
            rm = (modrm & 7) | REX_B(s);
5241
            gen_op_mov_TN_reg[ot][1][rm]();
5242
        }
5243
        gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5244
        break;
5245

    
5246
        /************************/
5247
        /* flags */
5248
    case 0x9c: /* pushf */
5249
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5250
            break;
5251
        if (s->vm86 && s->iopl != 3) {
5252
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5253
        } else {
5254
            if (s->cc_op != CC_OP_DYNAMIC)
5255
                gen_op_set_cc_op(s->cc_op);
5256
            gen_op_movl_T0_eflags();
5257
            gen_push_T0(s);
5258
        }
5259
        break;
5260
    case 0x9d: /* popf */
5261
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5262
            break;
5263
        if (s->vm86 && s->iopl != 3) {
5264
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5265
        } else {
5266
            gen_pop_T0(s);
5267
            if (s->cpl == 0) {
5268
                if (s->dflag) {
5269
                    gen_op_movl_eflags_T0_cpl0();
5270
                } else {
5271
                    gen_op_movw_eflags_T0_cpl0();
5272
                }
5273
            } else {
5274
                if (s->cpl <= s->iopl) {
5275
                    if (s->dflag) {
5276
                        gen_op_movl_eflags_T0_io();
5277
                    } else {
5278
                        gen_op_movw_eflags_T0_io();
5279
                    }
5280
                } else {
5281
                    if (s->dflag) {
5282
                        gen_op_movl_eflags_T0();
5283
                    } else {
5284
                        gen_op_movw_eflags_T0();
5285
                    }
5286
                }
5287
            }
5288
            gen_pop_update(s);
5289
            s->cc_op = CC_OP_EFLAGS;
5290
            /* abort translation because TF flag may change */
5291
            gen_jmp_im(s->pc - s->cs_base);
5292
            gen_eob(s);
5293
        }
5294
        break;
5295
    case 0x9e: /* sahf */
5296
        if (CODE64(s))
5297
            goto illegal_op;
5298
        gen_op_mov_TN_reg[OT_BYTE][0][R_AH]();
5299
        if (s->cc_op != CC_OP_DYNAMIC)
5300
            gen_op_set_cc_op(s->cc_op);
5301
        gen_op_movb_eflags_T0();
5302
        s->cc_op = CC_OP_EFLAGS;
5303
        break;
5304
    case 0x9f: /* lahf */
5305
        if (CODE64(s))
5306
            goto illegal_op;
5307
        if (s->cc_op != CC_OP_DYNAMIC)
5308
            gen_op_set_cc_op(s->cc_op);
5309
        gen_op_movl_T0_eflags();
5310
        gen_op_mov_reg_T0[OT_BYTE][R_AH]();
5311
        break;
5312
    case 0xf5: /* cmc */
5313
        if (s->cc_op != CC_OP_DYNAMIC)
5314
            gen_op_set_cc_op(s->cc_op);
5315
        gen_op_cmc();
5316
        s->cc_op = CC_OP_EFLAGS;
5317
        break;
5318
    case 0xf8: /* clc */
5319
        if (s->cc_op != CC_OP_DYNAMIC)
5320
            gen_op_set_cc_op(s->cc_op);
5321
        gen_op_clc();
5322
        s->cc_op = CC_OP_EFLAGS;
5323
        break;
5324
    case 0xf9: /* stc */
5325
        if (s->cc_op != CC_OP_DYNAMIC)
5326
            gen_op_set_cc_op(s->cc_op);
5327
        gen_op_stc();
5328
        s->cc_op = CC_OP_EFLAGS;
5329
        break;
5330
    case 0xfc: /* cld */
5331
        gen_op_cld();
5332
        break;
5333
    case 0xfd: /* std */
5334
        gen_op_std();
5335
        break;
5336

    
5337
        /************************/
5338
        /* bit operations */
5339
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
5340
        ot = dflag + OT_WORD;
5341
        modrm = ldub_code(s->pc++);
5342
        op = (modrm >> 3) & 7;
5343
        mod = (modrm >> 6) & 3;
5344
        rm = (modrm & 7) | REX_B(s);
5345
        if (mod != 3) {
5346
            s->rip_offset = 1;
5347
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5348
            gen_op_ld_T0_A0[ot + s->mem_index]();
5349
        } else {
5350
            gen_op_mov_TN_reg[ot][0][rm]();
5351
        }
5352
        /* load shift */
5353
        val = ldub_code(s->pc++);
5354
        gen_op_movl_T1_im(val);
5355
        if (op < 4)
5356
            goto illegal_op;
5357
        op -= 4;
5358
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5359
        s->cc_op = CC_OP_SARB + ot;
5360
        if (op != 0) {
5361
            if (mod != 3)
5362
                gen_op_st_T0_A0[ot + s->mem_index]();
5363
            else
5364
                gen_op_mov_reg_T0[ot][rm]();
5365
            gen_op_update_bt_cc();
5366
        }
5367
        break;
5368
    case 0x1a3: /* bt Gv, Ev */
5369
        op = 0;
5370
        goto do_btx;
5371
    case 0x1ab: /* bts */
5372
        op = 1;
5373
        goto do_btx;
5374
    case 0x1b3: /* btr */
5375
        op = 2;
5376
        goto do_btx;
5377
    case 0x1bb: /* btc */
5378
        op = 3;
5379
    do_btx:
5380
        ot = dflag + OT_WORD;
5381
        modrm = ldub_code(s->pc++);
5382
        reg = ((modrm >> 3) & 7) | rex_r;
5383
        mod = (modrm >> 6) & 3;
5384
        rm = (modrm & 7) | REX_B(s);
5385
        gen_op_mov_TN_reg[OT_LONG][1][reg]();
5386
        if (mod != 3) {
5387
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5388
            /* specific case: we need to add a displacement */
5389
            gen_op_add_bit_A0_T1[ot - OT_WORD]();
5390
            gen_op_ld_T0_A0[ot + s->mem_index]();
5391
        } else {
5392
            gen_op_mov_TN_reg[ot][0][rm]();
5393
        }
5394
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5395
        s->cc_op = CC_OP_SARB + ot;
5396
        if (op != 0) {
5397
            if (mod != 3)
5398
                gen_op_st_T0_A0[ot + s->mem_index]();
5399
            else
5400
                gen_op_mov_reg_T0[ot][rm]();
5401
            gen_op_update_bt_cc();
5402
        }
5403
        break;
5404
    case 0x1bc: /* bsf */
5405
    case 0x1bd: /* bsr */
5406
        ot = dflag + OT_WORD;
5407
        modrm = ldub_code(s->pc++);
5408
        reg = ((modrm >> 3) & 7) | rex_r;
5409
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5410
        /* NOTE: in order to handle the 0 case, we must load the
5411
           result. It could be optimized with a generated jump */
5412
        gen_op_mov_TN_reg[ot][1][reg]();
5413
        gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5414
        gen_op_mov_reg_T1[ot][reg]();
5415
        s->cc_op = CC_OP_LOGICB + ot;
5416
        break;
5417
        /************************/
5418
        /* bcd */
5419
    case 0x27: /* daa */
5420
        if (CODE64(s))
5421
            goto illegal_op;
5422
        if (s->cc_op != CC_OP_DYNAMIC)
5423
            gen_op_set_cc_op(s->cc_op);
5424
        gen_op_daa();
5425
        s->cc_op = CC_OP_EFLAGS;
5426
        break;
5427
    case 0x2f: /* das */
5428
        if (CODE64(s))
5429
            goto illegal_op;
5430
        if (s->cc_op != CC_OP_DYNAMIC)
5431
            gen_op_set_cc_op(s->cc_op);
5432
        gen_op_das();
5433
        s->cc_op = CC_OP_EFLAGS;
5434
        break;
5435
    case 0x37: /* aaa */
5436
        if (CODE64(s))
5437
            goto illegal_op;
5438
        if (s->cc_op != CC_OP_DYNAMIC)
5439
            gen_op_set_cc_op(s->cc_op);
5440
        gen_op_aaa();
5441
        s->cc_op = CC_OP_EFLAGS;
5442
        break;
5443
    case 0x3f: /* aas */
5444
        if (CODE64(s))
5445
            goto illegal_op;
5446
        if (s->cc_op != CC_OP_DYNAMIC)
5447
            gen_op_set_cc_op(s->cc_op);
5448
        gen_op_aas();
5449
        s->cc_op = CC_OP_EFLAGS;
5450
        break;
5451
    case 0xd4: /* aam */
5452
        if (CODE64(s))
5453
            goto illegal_op;
5454
        val = ldub_code(s->pc++);
5455
        if (val == 0) {
5456
            gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5457
        } else {
5458
            gen_op_aam(val);
5459
            s->cc_op = CC_OP_LOGICB;
5460
        }
5461
        break;
5462
    case 0xd5: /* aad */
5463
        if (CODE64(s))
5464
            goto illegal_op;
5465
        val = ldub_code(s->pc++);
5466
        gen_op_aad(val);
5467
        s->cc_op = CC_OP_LOGICB;
5468
        break;
5469
        /************************/
5470
        /* misc */
5471
    case 0x90: /* nop */
5472
        /* XXX: xchg + rex handling */
5473
        /* XXX: correct lock test for all insn */
5474
        if (prefixes & PREFIX_LOCK)
5475
            goto illegal_op;
5476
        if (prefixes & PREFIX_REPZ) {
5477
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
5478
        }
5479
        break;
5480
    case 0x9b: /* fwait */
5481
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5482
            (HF_MP_MASK | HF_TS_MASK)) {
5483
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5484
        } else {
5485
            if (s->cc_op != CC_OP_DYNAMIC)
5486
                gen_op_set_cc_op(s->cc_op);
5487
            gen_jmp_im(pc_start - s->cs_base);
5488
            gen_op_fwait();
5489
        }
5490
        break;
5491
    case 0xcc: /* int3 */
5492
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5493
            break;
5494
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5495
        break;
5496
    case 0xcd: /* int N */
5497
        val = ldub_code(s->pc++);
5498
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5499
            break;
5500
        if (s->vm86 && s->iopl != 3) {
5501
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5502
        } else {
5503
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5504
        }
5505
        break;
5506
    case 0xce: /* into */
5507
        if (CODE64(s))
5508
            goto illegal_op;
5509
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5510
            break;
5511
        if (s->cc_op != CC_OP_DYNAMIC)
5512
            gen_op_set_cc_op(s->cc_op);
5513
        gen_jmp_im(pc_start - s->cs_base);
5514
        gen_op_into(s->pc - pc_start);
5515
        break;
5516
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
5517
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
5518
            break;
5519
#if 1
5520
        gen_debug(s, pc_start - s->cs_base);
5521
#else
5522
        /* start debug */
5523
        tb_flush(cpu_single_env);
5524
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5525
#endif
5526
        break;
5527
    case 0xfa: /* cli */
5528
        if (!s->vm86) {
5529
            if (s->cpl <= s->iopl) {
5530
                gen_op_cli();
5531
            } else {
5532
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5533
            }
5534
        } else {
5535
            if (s->iopl == 3) {
5536
                gen_op_cli();
5537
            } else {
5538
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5539
            }
5540
        }
5541
        break;
5542
    case 0xfb: /* sti */
5543
        if (!s->vm86) {
5544
            if (s->cpl <= s->iopl) {
5545
            gen_sti:
5546
                gen_op_sti();
5547
                /* interruptions are enabled only the first insn after sti */
5548
                /* If several instructions disable interrupts, only the
5549
                   _first_ does it */
5550
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5551
                    gen_op_set_inhibit_irq();
5552
                /* give a chance to handle pending irqs */
5553
                gen_jmp_im(s->pc - s->cs_base);
5554
                gen_eob(s);
5555
            } else {
5556
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5557
            }
5558
        } else {
5559
            if (s->iopl == 3) {
5560
                goto gen_sti;
5561
            } else {
5562
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5563
            }
5564
        }
5565
        break;
5566
    case 0x62: /* bound */
5567
        if (CODE64(s))
5568
            goto illegal_op;
5569
        ot = dflag ? OT_LONG : OT_WORD;
5570
        modrm = ldub_code(s->pc++);
5571
        reg = (modrm >> 3) & 7;
5572
        mod = (modrm >> 6) & 3;
5573
        if (mod == 3)
5574
            goto illegal_op;
5575
        gen_op_mov_TN_reg[ot][0][reg]();
5576
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5577
        gen_jmp_im(pc_start - s->cs_base);
5578
        if (ot == OT_WORD)
5579
            gen_op_boundw();
5580
        else
5581
            gen_op_boundl();
5582
        break;
5583
    case 0x1c8 ... 0x1cf: /* bswap reg */
5584
        reg = (b & 7) | REX_B(s);
5585
#ifdef TARGET_X86_64
5586
        if (dflag == 2) {
5587
            gen_op_mov_TN_reg[OT_QUAD][0][reg]();
5588
            gen_op_bswapq_T0();
5589
            gen_op_mov_reg_T0[OT_QUAD][reg]();
5590
        } else
5591
#endif
5592
        {
5593
            gen_op_mov_TN_reg[OT_LONG][0][reg]();
5594
            gen_op_bswapl_T0();
5595
            gen_op_mov_reg_T0[OT_LONG][reg]();
5596
        }
5597
        break;
5598
    case 0xd6: /* salc */
5599
        if (CODE64(s))
5600
            goto illegal_op;
5601
        if (s->cc_op != CC_OP_DYNAMIC)
5602
            gen_op_set_cc_op(s->cc_op);
5603
        gen_op_salc();
5604
        break;
5605
    case 0xe0: /* loopnz */
5606
    case 0xe1: /* loopz */
5607
        if (s->cc_op != CC_OP_DYNAMIC)
5608
            gen_op_set_cc_op(s->cc_op);
5609
        /* FALL THRU */
5610
    case 0xe2: /* loop */
5611
    case 0xe3: /* jecxz */
5612
        {
5613
            int l1, l2;
5614

    
5615
            tval = (int8_t)insn_get(s, OT_BYTE);
5616
            next_eip = s->pc - s->cs_base;
5617
            tval += next_eip;
5618
            if (s->dflag == 0)
5619
                tval &= 0xffff;
5620

    
5621
            l1 = gen_new_label();
5622
            l2 = gen_new_label();
5623
            b &= 3;
5624
            if (b == 3) {
5625
                gen_op_jz_ecx[s->aflag](l1);
5626
            } else {
5627
                gen_op_dec_ECX[s->aflag]();
5628
                if (b <= 1)
5629
                    gen_op_mov_T0_cc();
5630
                gen_op_loop[s->aflag][b](l1);
5631
            }
5632

    
5633
            gen_jmp_im(next_eip);
5634
            gen_op_jmp_label(l2);
5635
            gen_set_label(l1);
5636
            gen_jmp_im(tval);
5637
            gen_set_label(l2);
5638
            gen_eob(s);
5639
        }
5640
        break;
5641
    case 0x130: /* wrmsr */
5642
    case 0x132: /* rdmsr */
5643
        if (s->cpl != 0) {
5644
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5645
        } else {
5646
            int retval = 0;
5647
            if (b & 2) {
5648
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
5649
                gen_op_rdmsr();
5650
            } else {
5651
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
5652
                gen_op_wrmsr();
5653
            }
5654
            if(retval)
5655
                gen_eob(s);
5656
        }
5657
        break;
5658
    case 0x131: /* rdtsc */
5659
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
5660
            break;
5661
        gen_jmp_im(pc_start - s->cs_base);
5662
        gen_op_rdtsc();
5663
        break;
5664
    case 0x134: /* sysenter */
5665
        if (CODE64(s))
5666
            goto illegal_op;
5667
        if (!s->pe) {
5668
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5669
        } else {
5670
            if (s->cc_op != CC_OP_DYNAMIC) {
5671
                gen_op_set_cc_op(s->cc_op);
5672
                s->cc_op = CC_OP_DYNAMIC;
5673
            }
5674
            gen_jmp_im(pc_start - s->cs_base);
5675
            gen_op_sysenter();
5676
            gen_eob(s);
5677
        }
5678
        break;
5679
    case 0x135: /* sysexit */
5680
        if (CODE64(s))
5681
            goto illegal_op;
5682
        if (!s->pe) {
5683
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5684
        } else {
5685
            if (s->cc_op != CC_OP_DYNAMIC) {
5686
                gen_op_set_cc_op(s->cc_op);
5687
                s->cc_op = CC_OP_DYNAMIC;
5688
            }
5689
            gen_jmp_im(pc_start - s->cs_base);
5690
            gen_op_sysexit();
5691
            gen_eob(s);
5692
        }
5693
        break;
5694
#ifdef TARGET_X86_64
5695
    case 0x105: /* syscall */
5696
        /* XXX: is it usable in real mode ? */
5697
        if (s->cc_op != CC_OP_DYNAMIC) {
5698
            gen_op_set_cc_op(s->cc_op);
5699
            s->cc_op = CC_OP_DYNAMIC;
5700
        }
5701
        gen_jmp_im(pc_start - s->cs_base);
5702
        gen_op_syscall(s->pc - pc_start);
5703
        gen_eob(s);
5704
        break;
5705
    case 0x107: /* sysret */
5706
        if (!s->pe) {
5707
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5708
        } else {
5709
            if (s->cc_op != CC_OP_DYNAMIC) {
5710
                gen_op_set_cc_op(s->cc_op);
5711
                s->cc_op = CC_OP_DYNAMIC;
5712
            }
5713
            gen_jmp_im(pc_start - s->cs_base);
5714
            gen_op_sysret(s->dflag);
5715
            /* condition codes are modified only in long mode */
5716
            if (s->lma)
5717
                s->cc_op = CC_OP_EFLAGS;
5718
            gen_eob(s);
5719
        }
5720
        break;
5721
#endif
5722
    case 0x1a2: /* cpuid */
5723
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
5724
            break;
5725
        gen_op_cpuid();
5726
        break;
5727
    case 0xf4: /* hlt */
5728
        if (s->cpl != 0) {
5729
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5730
        } else {
5731
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
5732
                break;
5733
            if (s->cc_op != CC_OP_DYNAMIC)
5734
                gen_op_set_cc_op(s->cc_op);
5735
            gen_jmp_im(s->pc - s->cs_base);
5736
            gen_op_hlt();
5737
            s->is_jmp = 3;
5738
        }
5739
        break;
5740
    case 0x100:
5741
        modrm = ldub_code(s->pc++);
5742
        mod = (modrm >> 6) & 3;
5743
        op = (modrm >> 3) & 7;
5744
        switch(op) {
5745
        case 0: /* sldt */
5746
            if (!s->pe || s->vm86)
5747
                goto illegal_op;
5748
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
5749
                break;
5750
            gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5751
            ot = OT_WORD;
5752
            if (mod == 3)
5753
                ot += s->dflag;
5754
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5755
            break;
5756
        case 2: /* lldt */
5757
            if (!s->pe || s->vm86)
5758
                goto illegal_op;
5759
            if (s->cpl != 0) {
5760
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5761
            } else {
5762
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
5763
                    break;
5764
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5765
                gen_jmp_im(pc_start - s->cs_base);
5766
                gen_op_lldt_T0();
5767
            }
5768
            break;
5769
        case 1: /* str */
5770
            if (!s->pe || s->vm86)
5771
                goto illegal_op;
5772
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
5773
                break;
5774
            gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5775
            ot = OT_WORD;
5776
            if (mod == 3)
5777
                ot += s->dflag;
5778
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5779
            break;
5780
        case 3: /* ltr */
5781
            if (!s->pe || s->vm86)
5782
                goto illegal_op;
5783
            if (s->cpl != 0) {
5784
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5785
            } else {
5786
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
5787
                    break;
5788
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5789
                gen_jmp_im(pc_start - s->cs_base);
5790
                gen_op_ltr_T0();
5791
            }
5792
            break;
5793
        case 4: /* verr */
5794
        case 5: /* verw */
5795
            if (!s->pe || s->vm86)
5796
                goto illegal_op;
5797
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5798
            if (s->cc_op != CC_OP_DYNAMIC)
5799
                gen_op_set_cc_op(s->cc_op);
5800
            if (op == 4)
5801
                gen_op_verr();
5802
            else
5803
                gen_op_verw();
5804
            s->cc_op = CC_OP_EFLAGS;
5805
            break;
5806
        default:
5807
            goto illegal_op;
5808
        }
5809
        break;
5810
    case 0x101:
5811
        modrm = ldub_code(s->pc++);
5812
        mod = (modrm >> 6) & 3;
5813
        op = (modrm >> 3) & 7;
5814
        rm = modrm & 7;
5815
        switch(op) {
5816
        case 0: /* sgdt */
5817
            if (mod == 3)
5818
                goto illegal_op;
5819
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
5820
                break;
5821
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5822
            gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5823
            gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5824
            gen_add_A0_im(s, 2);
5825
            gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5826
            if (!s->dflag)
5827
                gen_op_andl_T0_im(0xffffff);
5828
            gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5829
            break;
5830
        case 1:
5831
            if (mod == 3) {
5832
                switch (rm) {
5833
                case 0: /* monitor */
5834
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5835
                        s->cpl != 0)
5836
                        goto illegal_op;
5837
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
5838
                        break;
5839
                    gen_jmp_im(pc_start - s->cs_base);
5840
#ifdef TARGET_X86_64
5841
                    if (s->aflag == 2) {
5842
                        gen_op_movq_A0_reg[R_EBX]();
5843
                        gen_op_addq_A0_AL();
5844
                    } else
5845
#endif
5846
                    {
5847
                        gen_op_movl_A0_reg[R_EBX]();
5848
                        gen_op_addl_A0_AL();
5849
                        if (s->aflag == 0)
5850
                            gen_op_andl_A0_ffff();
5851
                    }
5852
                    gen_add_A0_ds_seg(s);
5853
                    gen_op_monitor();
5854
                    break;
5855
                case 1: /* mwait */
5856
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5857
                        s->cpl != 0)
5858
                        goto illegal_op;
5859
                    if (s->cc_op != CC_OP_DYNAMIC) {
5860
                        gen_op_set_cc_op(s->cc_op);
5861
                        s->cc_op = CC_OP_DYNAMIC;
5862
                    }
5863
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
5864
                        break;
5865
                    gen_jmp_im(s->pc - s->cs_base);
5866
                    gen_op_mwait();
5867
                    gen_eob(s);
5868
                    break;
5869
                default:
5870
                    goto illegal_op;
5871
                }
5872
            } else { /* sidt */
5873
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
5874
                    break;
5875
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5876
                gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
5877
                gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5878
                gen_add_A0_im(s, 2);
5879
                gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
5880
                if (!s->dflag)
5881
                    gen_op_andl_T0_im(0xffffff);
5882
                gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5883
            }
5884
            break;
5885
        case 2: /* lgdt */
5886
        case 3: /* lidt */
5887
            if (mod == 3) {
5888
                switch(rm) {
5889
                case 0: /* VMRUN */
5890
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
5891
                        break;
5892
                    if (s->cc_op != CC_OP_DYNAMIC)
5893
                        gen_op_set_cc_op(s->cc_op);
5894
                    gen_jmp_im(s->pc - s->cs_base);
5895
                    gen_op_vmrun();
5896
                    s->cc_op = CC_OP_EFLAGS;
5897
                    gen_eob(s);
5898
                    break;
5899
                case 1: /* VMMCALL */
5900
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
5901
                         break;
5902
                    /* FIXME: cause #UD if hflags & SVM */
5903
                    gen_op_vmmcall();
5904
                    break;
5905
                case 2: /* VMLOAD */
5906
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
5907
                         break;
5908
                    gen_op_vmload();
5909
                    break;
5910
                case 3: /* VMSAVE */
5911
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
5912
                         break;
5913
                    gen_op_vmsave();
5914
                    break;
5915
                case 4: /* STGI */
5916
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
5917
                         break;
5918
                    gen_op_stgi();
5919
                    break;
5920
                case 5: /* CLGI */
5921
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
5922
                         break;
5923
                    gen_op_clgi();
5924
                    break;
5925
                case 6: /* SKINIT */
5926
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
5927
                         break;
5928
                    gen_op_skinit();
5929
                    break;
5930
                case 7: /* INVLPGA */
5931
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
5932
                         break;
5933
                    gen_op_invlpga();
5934
                    break;
5935
                default:
5936
                    goto illegal_op;
5937
                }
5938
            } else if (s->cpl != 0) {
5939
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5940
            } else {
5941
                if (gen_svm_check_intercept(s, pc_start,
5942
                                            op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
5943
                    break;
5944
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5945
                gen_op_ld_T1_A0[OT_WORD + s->mem_index]();
5946
                gen_add_A0_im(s, 2);
5947
                gen_op_ld_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5948
                if (!s->dflag)
5949
                    gen_op_andl_T0_im(0xffffff);
5950
                if (op == 2) {
5951
                    gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
5952
                    gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
5953
                } else {
5954
                    gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
5955
                    gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
5956
                }
5957
            }
5958
            break;
5959
        case 4: /* smsw */
5960
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
5961
                break;
5962
            gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
5963
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
5964
            break;
5965
        case 6: /* lmsw */
5966
            if (s->cpl != 0) {
5967
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5968
            } else {
5969
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
5970
                    break;
5971
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5972
                gen_op_lmsw_T0();
5973
                gen_jmp_im(s->pc - s->cs_base);
5974
                gen_eob(s);
5975
            }
5976
            break;
5977
        case 7: /* invlpg */
5978
            if (s->cpl != 0) {
5979
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5980
            } else {
5981
                if (mod == 3) {
5982
#ifdef TARGET_X86_64
5983
                    if (CODE64(s) && rm == 0) {
5984
                        /* swapgs */
5985
                        gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
5986
                        gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
5987
                        gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
5988
                        gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
5989
                    } else
5990
#endif
5991
                    {
5992
                        goto illegal_op;
5993
                    }
5994
                } else {
5995
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
5996
                        break;
5997
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5998
                    gen_op_invlpg_A0();
5999
                    gen_jmp_im(s->pc - s->cs_base);
6000
                    gen_eob(s);
6001
                }
6002
            }
6003
            break;
6004
        default:
6005
            goto illegal_op;
6006
        }
6007
        break;
6008
    case 0x108: /* invd */
6009
    case 0x109: /* wbinvd */
6010
        if (s->cpl != 0) {
6011
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6012
        } else {
6013
            if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6014
                break;
6015
            /* nothing to do */
6016
        }
6017
        break;
6018
    case 0x63: /* arpl or movslS (x86_64) */
6019
#ifdef TARGET_X86_64
6020
        if (CODE64(s)) {
6021
            int d_ot;
6022
            /* d_ot is the size of destination */
6023
            d_ot = dflag + OT_WORD;
6024

    
6025
            modrm = ldub_code(s->pc++);
6026
            reg = ((modrm >> 3) & 7) | rex_r;
6027
            mod = (modrm >> 6) & 3;
6028
            rm = (modrm & 7) | REX_B(s);
6029

    
6030
            if (mod == 3) {
6031
                gen_op_mov_TN_reg[OT_LONG][0][rm]();
6032
                /* sign extend */
6033
                if (d_ot == OT_QUAD)
6034
                    gen_op_movslq_T0_T0();
6035
                gen_op_mov_reg_T0[d_ot][reg]();
6036
            } else {
6037
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6038
                if (d_ot == OT_QUAD) {
6039
                    gen_op_lds_T0_A0[OT_LONG + s->mem_index]();
6040
                } else {
6041
                    gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
6042
                }
6043
                gen_op_mov_reg_T0[d_ot][reg]();
6044
            }
6045
        } else
6046
#endif
6047
        {
6048
            if (!s->pe || s->vm86)
6049
                goto illegal_op;
6050
            ot = dflag ? OT_LONG : OT_WORD;
6051
            modrm = ldub_code(s->pc++);
6052
            reg = (modrm >> 3) & 7;
6053
            mod = (modrm >> 6) & 3;
6054
            rm = modrm & 7;
6055
            if (mod != 3) {
6056
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6057
                gen_op_ld_T0_A0[ot + s->mem_index]();
6058
            } else {
6059
                gen_op_mov_TN_reg[ot][0][rm]();
6060
            }
6061
            if (s->cc_op != CC_OP_DYNAMIC)
6062
                gen_op_set_cc_op(s->cc_op);
6063
            gen_op_arpl();
6064
            s->cc_op = CC_OP_EFLAGS;
6065
            if (mod != 3) {
6066
                gen_op_st_T0_A0[ot + s->mem_index]();
6067
            } else {
6068
                gen_op_mov_reg_T0[ot][rm]();
6069
            }
6070
            gen_op_arpl_update();
6071
        }
6072
        break;
6073
    case 0x102: /* lar */
6074
    case 0x103: /* lsl */
6075
        if (!s->pe || s->vm86)
6076
            goto illegal_op;
6077
        ot = dflag ? OT_LONG : OT_WORD;
6078
        modrm = ldub_code(s->pc++);
6079
        reg = ((modrm >> 3) & 7) | rex_r;
6080
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6081
        gen_op_mov_TN_reg[ot][1][reg]();
6082
        if (s->cc_op != CC_OP_DYNAMIC)
6083
            gen_op_set_cc_op(s->cc_op);
6084
        if (b == 0x102)
6085
            gen_op_lar();
6086
        else
6087
            gen_op_lsl();
6088
        s->cc_op = CC_OP_EFLAGS;
6089
        gen_op_mov_reg_T1[ot][reg]();
6090
        break;
6091
    case 0x118:
6092
        modrm = ldub_code(s->pc++);
6093
        mod = (modrm >> 6) & 3;
6094
        op = (modrm >> 3) & 7;
6095
        switch(op) {
6096
        case 0: /* prefetchnta */
6097
        case 1: /* prefetchnt0 */
6098
        case 2: /* prefetchnt0 */
6099
        case 3: /* prefetchnt0 */
6100
            if (mod == 3)
6101
                goto illegal_op;
6102
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6103
            /* nothing more to do */
6104
            break;
6105
        default: /* nop (multi byte) */
6106
            gen_nop_modrm(s, modrm);
6107
            break;
6108
        }
6109
        break;
6110
    case 0x119 ... 0x11f: /* nop (multi byte) */
6111
        modrm = ldub_code(s->pc++);
6112
        gen_nop_modrm(s, modrm);
6113
        break;
6114
    case 0x120: /* mov reg, crN */
6115
    case 0x122: /* mov crN, reg */
6116
        if (s->cpl != 0) {
6117
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6118
        } else {
6119
            modrm = ldub_code(s->pc++);
6120
            if ((modrm & 0xc0) != 0xc0)
6121
                goto illegal_op;
6122
            rm = (modrm & 7) | REX_B(s);
6123
            reg = ((modrm >> 3) & 7) | rex_r;
6124
            if (CODE64(s))
6125
                ot = OT_QUAD;
6126
            else
6127
                ot = OT_LONG;
6128
            switch(reg) {
6129
            case 0:
6130
            case 2:
6131
            case 3:
6132
            case 4:
6133
            case 8:
6134
                if (b & 2) {
6135
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6136
                    gen_op_mov_TN_reg[ot][0][rm]();
6137
                    gen_op_movl_crN_T0(reg);
6138
                    gen_jmp_im(s->pc - s->cs_base);
6139
                    gen_eob(s);
6140
                } else {
6141
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6142
#if !defined(CONFIG_USER_ONLY)
6143
                    if (reg == 8)
6144
                        gen_op_movtl_T0_cr8();
6145
                    else
6146
#endif
6147
                        gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6148
                    gen_op_mov_reg_T0[ot][rm]();
6149
                }
6150
                break;
6151
            default:
6152
                goto illegal_op;
6153
            }
6154
        }
6155
        break;
6156
    case 0x121: /* mov reg, drN */
6157
    case 0x123: /* mov drN, reg */
6158
        if (s->cpl != 0) {
6159
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6160
        } else {
6161
            modrm = ldub_code(s->pc++);
6162
            if ((modrm & 0xc0) != 0xc0)
6163
                goto illegal_op;
6164
            rm = (modrm & 7) | REX_B(s);
6165
            reg = ((modrm >> 3) & 7) | rex_r;
6166
            if (CODE64(s))
6167
                ot = OT_QUAD;
6168
            else
6169
                ot = OT_LONG;
6170
            /* XXX: do it dynamically with CR4.DE bit */
6171
            if (reg == 4 || reg == 5 || reg >= 8)
6172
                goto illegal_op;
6173
            if (b & 2) {
6174
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6175
                gen_op_mov_TN_reg[ot][0][rm]();
6176
                gen_op_movl_drN_T0(reg);
6177
                gen_jmp_im(s->pc - s->cs_base);
6178
                gen_eob(s);
6179
            } else {
6180
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6181
                gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6182
                gen_op_mov_reg_T0[ot][rm]();
6183
            }
6184
        }
6185
        break;
6186
    case 0x106: /* clts */
6187
        if (s->cpl != 0) {
6188
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6189
        } else {
6190
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6191
            gen_op_clts();
6192
            /* abort block because static cpu state changed */
6193
            gen_jmp_im(s->pc - s->cs_base);
6194
            gen_eob(s);
6195
        }
6196
        break;
6197
    /* MMX/SSE/SSE2/PNI support */
6198
    case 0x1c3: /* MOVNTI reg, mem */
6199
        if (!(s->cpuid_features & CPUID_SSE2))
6200
            goto illegal_op;
6201
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6202
        modrm = ldub_code(s->pc++);
6203
        mod = (modrm >> 6) & 3;
6204
        if (mod == 3)
6205
            goto illegal_op;
6206
        reg = ((modrm >> 3) & 7) | rex_r;
6207
        /* generate a generic store */
6208
        gen_ldst_modrm(s, modrm, ot, reg, 1);
6209
        break;
6210
    case 0x1ae:
6211
        modrm = ldub_code(s->pc++);
6212
        mod = (modrm >> 6) & 3;
6213
        op = (modrm >> 3) & 7;
6214
        switch(op) {
6215
        case 0: /* fxsave */
6216
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6217
                (s->flags & HF_EM_MASK))
6218
                goto illegal_op;
6219
            if (s->flags & HF_TS_MASK) {
6220
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6221
                break;
6222
            }
6223
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6224
            gen_op_fxsave_A0((s->dflag == 2));
6225
            break;
6226
        case 1: /* fxrstor */
6227
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6228
                (s->flags & HF_EM_MASK))
6229
                goto illegal_op;
6230
            if (s->flags & HF_TS_MASK) {
6231
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6232
                break;
6233
            }
6234
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6235
            gen_op_fxrstor_A0((s->dflag == 2));
6236
            break;
6237
        case 2: /* ldmxcsr */
6238
        case 3: /* stmxcsr */
6239
            if (s->flags & HF_TS_MASK) {
6240
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6241
                break;
6242
            }
6243
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6244
                mod == 3)
6245
                goto illegal_op;
6246
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6247
            if (op == 2) {
6248
                gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
6249
                gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6250
            } else {
6251
                gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6252
                gen_op_st_T0_A0[OT_LONG + s->mem_index]();
6253
            }
6254
            break;
6255
        case 5: /* lfence */
6256
        case 6: /* mfence */
6257
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6258
                goto illegal_op;
6259
            break;
6260
        case 7: /* sfence / clflush */
6261
            if ((modrm & 0xc7) == 0xc0) {
6262
                /* sfence */
6263
                if (!(s->cpuid_features & CPUID_SSE))
6264
                    goto illegal_op;
6265
            } else {
6266
                /* clflush */
6267
                if (!(s->cpuid_features & CPUID_CLFLUSH))
6268
                    goto illegal_op;
6269
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6270
            }
6271
            break;
6272
        default:
6273
            goto illegal_op;
6274
        }
6275
        break;
6276
    case 0x10d: /* prefetch */
6277
        modrm = ldub_code(s->pc++);
6278
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6279
        /* ignore for now */
6280
        break;
6281
    case 0x1aa: /* rsm */
6282
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
6283
            break;
6284
        if (!(s->flags & HF_SMM_MASK))
6285
            goto illegal_op;
6286
        if (s->cc_op != CC_OP_DYNAMIC) {
6287
            gen_op_set_cc_op(s->cc_op);
6288
            s->cc_op = CC_OP_DYNAMIC;
6289
        }
6290
        gen_jmp_im(s->pc - s->cs_base);
6291
        gen_op_rsm();
6292
        gen_eob(s);
6293
        break;
6294
    case 0x110 ... 0x117:
6295
    case 0x128 ... 0x12f:
6296
    case 0x150 ... 0x177:
6297
    case 0x17c ... 0x17f:
6298
    case 0x1c2:
6299
    case 0x1c4 ... 0x1c6:
6300
    case 0x1d0 ... 0x1fe:
6301
        gen_sse(s, b, pc_start, rex_r);
6302
        break;
6303
    default:
6304
        goto illegal_op;
6305
    }
6306
    /* lock generation */
6307
    if (s->prefix & PREFIX_LOCK)
6308
        gen_op_unlock();
6309
    return s->pc;
6310
 illegal_op:
6311
    if (s->prefix & PREFIX_LOCK)
6312
        gen_op_unlock();
6313
    /* XXX: ensure that no lock was generated */
6314
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6315
    return s->pc;
6316
}
6317

    
6318
/* Mask of all six arithmetic condition-code flag bits: O, S, Z, A, P and C. */
#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
6319
/* Same as CC_OSZAPC but without the carry bit: O, S, Z, A and P only. */
#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
6320

    
6321
/* flags read by an operation */
6322
static uint16_t opc_read_flags[NB_OPS] = {
6323
    [INDEX_op_aas] = CC_A,
6324
    [INDEX_op_aaa] = CC_A,
6325
    [INDEX_op_das] = CC_A | CC_C,
6326
    [INDEX_op_daa] = CC_A | CC_C,
6327

    
6328
    /* subtle: due to the incl/decl implementation, C is used */
6329
    [INDEX_op_update_inc_cc] = CC_C,
6330

    
6331
    [INDEX_op_into] = CC_O,
6332

    
6333
    [INDEX_op_jb_subb] = CC_C,
6334
    [INDEX_op_jb_subw] = CC_C,
6335
    [INDEX_op_jb_subl] = CC_C,
6336

    
6337
    [INDEX_op_jz_subb] = CC_Z,
6338
    [INDEX_op_jz_subw] = CC_Z,
6339
    [INDEX_op_jz_subl] = CC_Z,
6340

    
6341
    [INDEX_op_jbe_subb] = CC_Z | CC_C,
6342
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
6343
    [INDEX_op_jbe_subl] = CC_Z | CC_C,
6344

    
6345
    [INDEX_op_js_subb] = CC_S,
6346
    [INDEX_op_js_subw] = CC_S,
6347
    [INDEX_op_js_subl] = CC_S,
6348

    
6349
    [INDEX_op_jl_subb] = CC_O | CC_S,
6350
    [INDEX_op_jl_subw] = CC_O | CC_S,
6351
    [INDEX_op_jl_subl] = CC_O | CC_S,
6352

    
6353
    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
6354
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
6355
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,
6356

    
6357
    [INDEX_op_loopnzw] = CC_Z,
6358
    [INDEX_op_loopnzl] = CC_Z,
6359
    [INDEX_op_loopzw] = CC_Z,
6360
    [INDEX_op_loopzl] = CC_Z,
6361

    
6362
    [INDEX_op_seto_T0_cc] = CC_O,
6363
    [INDEX_op_setb_T0_cc] = CC_C,
6364
    [INDEX_op_setz_T0_cc] = CC_Z,
6365
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
6366
    [INDEX_op_sets_T0_cc] = CC_S,
6367
    [INDEX_op_setp_T0_cc] = CC_P,
6368
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
6369
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,
6370

    
6371
    [INDEX_op_setb_T0_subb] = CC_C,
6372
    [INDEX_op_setb_T0_subw] = CC_C,
6373
    [INDEX_op_setb_T0_subl] = CC_C,
6374

    
6375
    [INDEX_op_setz_T0_subb] = CC_Z,
6376
    [INDEX_op_setz_T0_subw] = CC_Z,
6377
    [INDEX_op_setz_T0_subl] = CC_Z,
6378

    
6379
    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
6380
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
6381
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,
6382

    
6383
    [INDEX_op_sets_T0_subb] = CC_S,
6384
    [INDEX_op_sets_T0_subw] = CC_S,
6385
    [INDEX_op_sets_T0_subl] = CC_S,
6386

    
6387
    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
6388
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
6389
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,
6390

    
6391
    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
6392
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
6393
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,
6394

    
6395
    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
6396
    [INDEX_op_cmc] = CC_C,
6397
    [INDEX_op_salc] = CC_C,
6398

    
6399
    /* needed for correct flag optimisation before string ops */
6400
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
6401
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
6402
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
6403
    [INDEX_op_jz_ecxl] = CC_OSZAPC,
6404

    
6405
#ifdef TARGET_X86_64
6406
    [INDEX_op_jb_subq] = CC_C,
6407
    [INDEX_op_jz_subq] = CC_Z,
6408
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
6409
    [INDEX_op_js_subq] = CC_S,
6410
    [INDEX_op_jl_subq] = CC_O | CC_S,
6411
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,
6412

    
6413
    [INDEX_op_loopnzq] = CC_Z,
6414
    [INDEX_op_loopzq] = CC_Z,
6415

    
6416
    [INDEX_op_setb_T0_subq] = CC_C,
6417
    [INDEX_op_setz_T0_subq] = CC_Z,
6418
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
6419
    [INDEX_op_sets_T0_subq] = CC_S,
6420
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
6421
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,
6422

    
6423
    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
6424
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
6425
#endif
6426

    
6427
#define DEF_READF(SUFFIX)\
6428
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6429
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6430
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6431
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6432
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6433
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6434
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6435
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6436
\
6437
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6438
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6439
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
6440
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6441
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6442
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6443
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6444
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)
6445

    
6446
    DEF_READF( )
6447
    DEF_READF(_raw)
6448
#ifndef CONFIG_USER_ONLY
6449
    DEF_READF(_kernel)
6450
    DEF_READF(_user)
6451
#endif
6452
};
6453

    
6454
/* flags written by an operation */
/* Table indexed by micro-op number (INDEX_op_*): the set of x86
   condition-code flags each op (re)defines.  optimize_flags() uses it
   to kill liveness of flags overwritten before being read, and to
   decide whether an op may be replaced by its flagless variant. */
static uint16_t opc_write_flags[NB_OPS] = {
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    /* multiplies clobber all arithmetic flags */
    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    /* sse */
    [INDEX_op_ucomiss] = CC_OSZAPC,
    [INDEX_op_ucomisd] = CC_OSZAPC,
    [INDEX_op_comiss] = CC_OSZAPC,
    [INDEX_op_comisd] = CC_OSZAPC,

    /* bcd */
    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    /* loads into EFLAGS; the byte form (sahf-style) cannot touch O */
    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    /* bit test/set/reset/complement */
    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    /* bit scan forward/reverse */
    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    /* ops that only define Z (or Z/P/C for the FPU compares) */
    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_verr] = CC_Z,
    [INDEX_op_verw] = CC_Z,
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,

/* RMW arithmetic/shift/rotate ops; instantiated once per
   memory-access suffix variant below. */
#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)


    DEF_WRITEF( )
    DEF_WRITEF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_WRITEF(_kernel)
    DEF_WRITEF(_user)
#endif
};
6604

    
6605
/* simpler form of an operation if no flags need to be generated */
/* Table indexed by micro-op number: the replacement op to emit when
   optimize_flags() proves none of the op's flag outputs are live.
   Entries left 0 here are filled with the identity mapping by
   optimize_flags_init(). */
static uint16_t opc_simpler[NB_OPS] = {
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
#if 0
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
#endif

    /* shifts: map the flag-producing form to the plain form */
    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)

/* rotates, per memory-access suffix variant */
#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};
6648

    
6649
void optimize_flags_init(void)
6650
{
6651
    int i;
6652
    /* put default values in arrays */
6653
    for(i = 0; i < NB_OPS; i++) {
6654
        if (opc_simpler[i] == 0)
6655
            opc_simpler[i] = i;
6656
    }
6657
}
6658

    
6659
/* CPU flags computation optimization: we move backward thru the
6660
   generated code to see which flags are needed. The operation is
6661
   modified if suitable */
6662
static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6663
{
6664
    uint16_t *opc_ptr;
6665
    int live_flags, write_flags, op;
6666

    
6667
    opc_ptr = opc_buf + opc_buf_len;
6668
    /* live_flags contains the flags needed by the next instructions
6669
       in the code. At the end of the block, we consider that all the
6670
       flags are live. */
6671
    live_flags = CC_OSZAPC;
6672
    while (opc_ptr > opc_buf) {
6673
        op = *--opc_ptr;
6674
        /* if none of the flags written by the instruction is used,
6675
           then we can try to find a simpler instruction */
6676
        write_flags = opc_write_flags[op];
6677
        if ((live_flags & write_flags) == 0) {
6678
            *opc_ptr = opc_simpler[op];
6679
        }
6680
        /* compute the live flags before the instruction */
6681
        live_flags &= ~write_flags;
6682
        live_flags |= opc_read_flags[op];
6683
    }
6684
}
6685

    
6686
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
/* Returns 0.  Side effects: fills the global micro-op buffers
   (gen_opc_ptr/gen_opparam_ptr), and when search_pc is set, the
   gen_opc_pc/gen_opc_cc_op/gen_opc_instr_start side tables used to
   recover the guest PC from a host fault address. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int j, lj, cflags;
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* unpack the CPU state bits cached in tb->flags into the
       per-translation DisasContext */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    /* mem_index picks the kernel/user access-op variant; the scale
       factor 4 presumably matches the per-op variant stride — TODO
       confirm against the op table generation. */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    /* direct block chaining is only safe when not single-stepping and
       no IRQ-inhibit window is pending */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    /* reset the global micro-op output buffers */
    gen_opc_ptr = gen_opc_buf;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    gen_opparam_ptr = gen_opparam_buf;
    nb_gen_labels = 0;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;

    /* main translation loop: one guest instruction per iteration */
    for(;;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* record pc/cc_op for each micro-op slot; slots emitted by
               the previous instruction but not yet marked get 0 */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           change to be happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP) {
            fprintf(logfile, "OP:\n");
            dump_ops(gen_opc_buf, gen_opparam_buf);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP_OPT) {
        fprintf(logfile, "AFTER FLAGS OPT:\n");
        dump_ops(gen_opc_buf, gen_opparam_buf);
        fprintf(logfile, "\n");
    }
#endif
    /* tb->size is only meaningful for a fresh translation, not a
       PC-search re-translation */
    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
6848

    
6849
/* Public entry point: translate basic block 'tb' without emitting
   per-op PC search information. */
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    const int search_pc = 0;
    return gen_intermediate_code_internal(env, tb, search_pc);
}
6853

    
6854
/* Public entry point: re-translate basic block 'tb' while recording
   per-op PC information (used to map a fault back to a guest PC). */
int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    const int search_pc = 1;
    return gen_intermediate_code_internal(env, tb, search_pc);
}
6858