Statistics
| Branch: | Revision:

root / target-i386 / translate.c @ ec6338ba

History | View | Annotate | Download (206 kB)

1
/*
2
 *  i386 translation
3
 *
4
 *  Copyright (c) 2003 Fabrice Bellard
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, write to the Free Software
18
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
 */
20
#include <stdarg.h>
21
#include <stdlib.h>
22
#include <stdio.h>
23
#include <string.h>
24
#include <inttypes.h>
25
#include <signal.h>
26
#include <assert.h>
27

    
28
#include "cpu.h"
29
#include "exec-all.h"
30
#include "disas.h"
31

    
32
/* XXX: move that elsewhere */
33
static uint16_t *gen_opc_ptr;
34
static uint32_t *gen_opparam_ptr;
35

    
36
#define PREFIX_REPZ   0x01
37
#define PREFIX_REPNZ  0x02
38
#define PREFIX_LOCK   0x04
39
#define PREFIX_DATA   0x08
40
#define PREFIX_ADR    0x10
41

    
42
#ifdef TARGET_X86_64
43
#define X86_64_ONLY(x) x
44
#define X86_64_DEF(x...) x
45
#define CODE64(s) ((s)->code64)
46
#define REX_X(s) ((s)->rex_x)
47
#define REX_B(s) ((s)->rex_b)
48
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
49
#if 1
50
#define BUGGY_64(x) NULL
51
#endif
52
#else
53
#define X86_64_ONLY(x) NULL
54
#define X86_64_DEF(x...)
55
#define CODE64(s) 0
56
#define REX_X(s) 0
57
#define REX_B(s) 0
58
#endif
59

    
60
#ifdef TARGET_X86_64
61
static int x86_64_hregs;
62
#endif
63

    
64
#ifdef USE_DIRECT_JUMP
65
#define TBPARAM(x)
66
#else
67
#define TBPARAM(x) (long)(x)
68
#endif
69

    
70
/* Per-translation-block disassembly state for the i386 front end.
   The "current insn context" fields are refreshed for every decoded
   instruction; the "current block context" fields are fixed for the
   lifetime of one TranslationBlock. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* segment register index from an override prefix; -1 if no override */
    int prefix;   /* mask of PREFIX_* bits seen on the current insn */
    int aflag, dflag; /* address/operand size: 0=16 bit, 1=32 bit; aflag==2 is
                         64-bit addressing under TARGET_X86_64 (see aflag==2
                         checks below); dflag==2 presumably 64-bit operand
                         size — confirm against decoder */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* REX prefix index/base register extension bits */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation; CC_OP_DYNAMIC means flags must be
                   computed at runtime from the saved cc state */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;    /* current privilege level */
    int iopl;   /* I/O privilege level */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions; used as an offset into
                      the gen_op_ld*/st* tables (raw/kernel/user variants) */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;     /* CPUID feature bits (presumably cached from env) */
    int cpuid_ext_features; /* CPUID extended feature bits */
} DisasContext;
105

    
106
static void gen_eob(DisasContext *s);
107
static void gen_jmp(DisasContext *s, target_ulong eip);
108
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
109

    
110
/* i386 arith/logic operations */
111
enum {
112
    OP_ADDL,
113
    OP_ORL,
114
    OP_ADCL,
115
    OP_SBBL,
116
    OP_ANDL,
117
    OP_SUBL,
118
    OP_XORL,
119
    OP_CMPL,
120
};
121

    
122
/* i386 shift ops */
123
enum {
124
    OP_ROL,
125
    OP_ROR,
126
    OP_RCL,
127
    OP_RCR,
128
    OP_SHL,
129
    OP_SHR,
130
    OP_SHL1, /* undocumented */
131
    OP_SAR = 7,
132
};
133

    
134
enum {
135
#define DEF(s, n, copy_size) INDEX_op_ ## s,
136
#include "opc.h"
137
#undef DEF
138
    NB_OPS,
139
};
140

    
141
#include "gen-op.h"
142

    
143
/* operand size */
144
enum {
145
    OT_BYTE = 0,
146
    OT_WORD,
147
    OT_LONG,
148
    OT_QUAD,
149
};
150

    
151
enum {
152
    /* I386 int registers */
153
    OR_EAX,   /* MUST be even numbered */
154
    OR_ECX,
155
    OR_EDX,
156
    OR_EBX,
157
    OR_ESP,
158
    OR_EBP,
159
    OR_ESI,
160
    OR_EDI,
161

    
162
    OR_TMP0 = 16,    /* temporary operand register */
163
    OR_TMP1,
164
    OR_A0, /* temporary register used when doing address evaluation */
165
};
166

    
167
#ifdef TARGET_X86_64
168

    
169
#define NB_OP_SIZES 4
170

    
171
#define DEF_REGS(prefix, suffix) \
172
  prefix ## EAX ## suffix,\
173
  prefix ## ECX ## suffix,\
174
  prefix ## EDX ## suffix,\
175
  prefix ## EBX ## suffix,\
176
  prefix ## ESP ## suffix,\
177
  prefix ## EBP ## suffix,\
178
  prefix ## ESI ## suffix,\
179
  prefix ## EDI ## suffix,\
180
  prefix ## R8 ## suffix,\
181
  prefix ## R9 ## suffix,\
182
  prefix ## R10 ## suffix,\
183
  prefix ## R11 ## suffix,\
184
  prefix ## R12 ## suffix,\
185
  prefix ## R13 ## suffix,\
186
  prefix ## R14 ## suffix,\
187
  prefix ## R15 ## suffix,
188

    
189
#define DEF_BREGS(prefixb, prefixh, suffix)             \
190
                                                        \
191
static void prefixb ## ESP ## suffix ## _wrapper(void)  \
192
{                                                       \
193
    if (x86_64_hregs)                                 \
194
        prefixb ## ESP ## suffix ();                    \
195
    else                                                \
196
        prefixh ## EAX ## suffix ();                    \
197
}                                                       \
198
                                                        \
199
static void prefixb ## EBP ## suffix ## _wrapper(void)  \
200
{                                                       \
201
    if (x86_64_hregs)                                 \
202
        prefixb ## EBP ## suffix ();                    \
203
    else                                                \
204
        prefixh ## ECX ## suffix ();                    \
205
}                                                       \
206
                                                        \
207
static void prefixb ## ESI ## suffix ## _wrapper(void)  \
208
{                                                       \
209
    if (x86_64_hregs)                                 \
210
        prefixb ## ESI ## suffix ();                    \
211
    else                                                \
212
        prefixh ## EDX ## suffix ();                    \
213
}                                                       \
214
                                                        \
215
static void prefixb ## EDI ## suffix ## _wrapper(void)  \
216
{                                                       \
217
    if (x86_64_hregs)                                 \
218
        prefixb ## EDI ## suffix ();                    \
219
    else                                                \
220
        prefixh ## EBX ## suffix ();                    \
221
}
222

    
223
DEF_BREGS(gen_op_movb_, gen_op_movh_, _T0)
224
DEF_BREGS(gen_op_movb_, gen_op_movh_, _T1)
225
DEF_BREGS(gen_op_movl_T0_, gen_op_movh_T0_, )
226
DEF_BREGS(gen_op_movl_T1_, gen_op_movh_T1_, )
227

    
228
#else /* !TARGET_X86_64 */
229

    
230
#define NB_OP_SIZES 3
231

    
232
#define DEF_REGS(prefix, suffix) \
233
  prefix ## EAX ## suffix,\
234
  prefix ## ECX ## suffix,\
235
  prefix ## EDX ## suffix,\
236
  prefix ## EBX ## suffix,\
237
  prefix ## ESP ## suffix,\
238
  prefix ## EBP ## suffix,\
239
  prefix ## ESI ## suffix,\
240
  prefix ## EDI ## suffix,
241

    
242
#endif /* !TARGET_X86_64 */
243

    
244
static GenOpFunc *gen_op_mov_reg_T0[NB_OP_SIZES][CPU_NB_REGS] = {
245
    [OT_BYTE] = {
246
        gen_op_movb_EAX_T0,
247
        gen_op_movb_ECX_T0,
248
        gen_op_movb_EDX_T0,
249
        gen_op_movb_EBX_T0,
250
#ifdef TARGET_X86_64
251
        gen_op_movb_ESP_T0_wrapper,
252
        gen_op_movb_EBP_T0_wrapper,
253
        gen_op_movb_ESI_T0_wrapper,
254
        gen_op_movb_EDI_T0_wrapper,
255
        gen_op_movb_R8_T0,
256
        gen_op_movb_R9_T0,
257
        gen_op_movb_R10_T0,
258
        gen_op_movb_R11_T0,
259
        gen_op_movb_R12_T0,
260
        gen_op_movb_R13_T0,
261
        gen_op_movb_R14_T0,
262
        gen_op_movb_R15_T0,
263
#else
264
        gen_op_movh_EAX_T0,
265
        gen_op_movh_ECX_T0,
266
        gen_op_movh_EDX_T0,
267
        gen_op_movh_EBX_T0,
268
#endif
269
    },
270
    [OT_WORD] = {
271
        DEF_REGS(gen_op_movw_, _T0)
272
    },
273
    [OT_LONG] = {
274
        DEF_REGS(gen_op_movl_, _T0)
275
    },
276
#ifdef TARGET_X86_64
277
    [OT_QUAD] = {
278
        DEF_REGS(gen_op_movq_, _T0)
279
    },
280
#endif
281
};
282

    
283
static GenOpFunc *gen_op_mov_reg_T1[NB_OP_SIZES][CPU_NB_REGS] = {
284
    [OT_BYTE] = {
285
        gen_op_movb_EAX_T1,
286
        gen_op_movb_ECX_T1,
287
        gen_op_movb_EDX_T1,
288
        gen_op_movb_EBX_T1,
289
#ifdef TARGET_X86_64
290
        gen_op_movb_ESP_T1_wrapper,
291
        gen_op_movb_EBP_T1_wrapper,
292
        gen_op_movb_ESI_T1_wrapper,
293
        gen_op_movb_EDI_T1_wrapper,
294
        gen_op_movb_R8_T1,
295
        gen_op_movb_R9_T1,
296
        gen_op_movb_R10_T1,
297
        gen_op_movb_R11_T1,
298
        gen_op_movb_R12_T1,
299
        gen_op_movb_R13_T1,
300
        gen_op_movb_R14_T1,
301
        gen_op_movb_R15_T1,
302
#else
303
        gen_op_movh_EAX_T1,
304
        gen_op_movh_ECX_T1,
305
        gen_op_movh_EDX_T1,
306
        gen_op_movh_EBX_T1,
307
#endif
308
    },
309
    [OT_WORD] = {
310
        DEF_REGS(gen_op_movw_, _T1)
311
    },
312
    [OT_LONG] = {
313
        DEF_REGS(gen_op_movl_, _T1)
314
    },
315
#ifdef TARGET_X86_64
316
    [OT_QUAD] = {
317
        DEF_REGS(gen_op_movq_, _T1)
318
    },
319
#endif
320
};
321

    
322
static GenOpFunc *gen_op_mov_reg_A0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
323
    [0] = {
324
        DEF_REGS(gen_op_movw_, _A0)
325
    },
326
    [1] = {
327
        DEF_REGS(gen_op_movl_, _A0)
328
    },
329
#ifdef TARGET_X86_64
330
    [2] = {
331
        DEF_REGS(gen_op_movq_, _A0)
332
    },
333
#endif
334
};
335

    
336
static GenOpFunc *gen_op_mov_TN_reg[NB_OP_SIZES][2][CPU_NB_REGS] =
337
{
338
    [OT_BYTE] = {
339
        {
340
            gen_op_movl_T0_EAX,
341
            gen_op_movl_T0_ECX,
342
            gen_op_movl_T0_EDX,
343
            gen_op_movl_T0_EBX,
344
#ifdef TARGET_X86_64
345
            gen_op_movl_T0_ESP_wrapper,
346
            gen_op_movl_T0_EBP_wrapper,
347
            gen_op_movl_T0_ESI_wrapper,
348
            gen_op_movl_T0_EDI_wrapper,
349
            gen_op_movl_T0_R8,
350
            gen_op_movl_T0_R9,
351
            gen_op_movl_T0_R10,
352
            gen_op_movl_T0_R11,
353
            gen_op_movl_T0_R12,
354
            gen_op_movl_T0_R13,
355
            gen_op_movl_T0_R14,
356
            gen_op_movl_T0_R15,
357
#else
358
            gen_op_movh_T0_EAX,
359
            gen_op_movh_T0_ECX,
360
            gen_op_movh_T0_EDX,
361
            gen_op_movh_T0_EBX,
362
#endif
363
        },
364
        {
365
            gen_op_movl_T1_EAX,
366
            gen_op_movl_T1_ECX,
367
            gen_op_movl_T1_EDX,
368
            gen_op_movl_T1_EBX,
369
#ifdef TARGET_X86_64
370
            gen_op_movl_T1_ESP_wrapper,
371
            gen_op_movl_T1_EBP_wrapper,
372
            gen_op_movl_T1_ESI_wrapper,
373
            gen_op_movl_T1_EDI_wrapper,
374
            gen_op_movl_T1_R8,
375
            gen_op_movl_T1_R9,
376
            gen_op_movl_T1_R10,
377
            gen_op_movl_T1_R11,
378
            gen_op_movl_T1_R12,
379
            gen_op_movl_T1_R13,
380
            gen_op_movl_T1_R14,
381
            gen_op_movl_T1_R15,
382
#else
383
            gen_op_movh_T1_EAX,
384
            gen_op_movh_T1_ECX,
385
            gen_op_movh_T1_EDX,
386
            gen_op_movh_T1_EBX,
387
#endif
388
        },
389
    },
390
    [OT_WORD] = {
391
        {
392
            DEF_REGS(gen_op_movl_T0_, )
393
        },
394
        {
395
            DEF_REGS(gen_op_movl_T1_, )
396
        },
397
    },
398
    [OT_LONG] = {
399
        {
400
            DEF_REGS(gen_op_movl_T0_, )
401
        },
402
        {
403
            DEF_REGS(gen_op_movl_T1_, )
404
        },
405
    },
406
#ifdef TARGET_X86_64
407
    [OT_QUAD] = {
408
        {
409
            DEF_REGS(gen_op_movl_T0_, )
410
        },
411
        {
412
            DEF_REGS(gen_op_movl_T1_, )
413
        },
414
    },
415
#endif
416
};
417

    
418
static GenOpFunc *gen_op_movl_A0_reg[CPU_NB_REGS] = {
419
    DEF_REGS(gen_op_movl_A0_, )
420
};
421

    
422
static GenOpFunc *gen_op_addl_A0_reg_sN[4][CPU_NB_REGS] = {
423
    [0] = {
424
        DEF_REGS(gen_op_addl_A0_, )
425
    },
426
    [1] = {
427
        DEF_REGS(gen_op_addl_A0_, _s1)
428
    },
429
    [2] = {
430
        DEF_REGS(gen_op_addl_A0_, _s2)
431
    },
432
    [3] = {
433
        DEF_REGS(gen_op_addl_A0_, _s3)
434
    },
435
};
436

    
437
#ifdef TARGET_X86_64
438
static GenOpFunc *gen_op_movq_A0_reg[CPU_NB_REGS] = {
439
    DEF_REGS(gen_op_movq_A0_, )
440
};
441

    
442
static GenOpFunc *gen_op_addq_A0_reg_sN[4][CPU_NB_REGS] = {
443
    [0] = {
444
        DEF_REGS(gen_op_addq_A0_, )
445
    },
446
    [1] = {
447
        DEF_REGS(gen_op_addq_A0_, _s1)
448
    },
449
    [2] = {
450
        DEF_REGS(gen_op_addq_A0_, _s2)
451
    },
452
    [3] = {
453
        DEF_REGS(gen_op_addq_A0_, _s3)
454
    },
455
};
456
#endif
457

    
458
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
459
    [0] = {
460
        DEF_REGS(gen_op_cmovw_, _T1_T0)
461
    },
462
    [1] = {
463
        DEF_REGS(gen_op_cmovl_, _T1_T0)
464
    },
465
#ifdef TARGET_X86_64
466
    [2] = {
467
        DEF_REGS(gen_op_cmovq_, _T1_T0)
468
    },
469
#endif
470
};
471

    
472
/* Dispatch table for arithmetic/logic ops on T0/T1, indexed by the OP_*
   enum (OP_ADDL..OP_CMPL).  Only the pure logic ops (OR/AND/XOR) go
   through this table; the NULL slots (ADD/ADC/SBB/SUB/CMP) are
   presumably handled by dedicated code paths in the decoder — confirm
   against the gen_op() caller. */
static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
    NULL,               /* OP_ADDL */
    gen_op_orl_T0_T1,   /* OP_ORL  */
    NULL,               /* OP_ADCL */
    NULL,               /* OP_SBBL */
    gen_op_andl_T0_T1,  /* OP_ANDL */
    NULL,               /* OP_SUBL */
    gen_op_xorl_T0_T1,  /* OP_XORL */
    NULL,               /* OP_CMPL */
};
482

    
483
#define DEF_ARITHC(SUFFIX)\
484
    {\
485
        gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
486
        gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
487
    },\
488
    {\
489
        gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
490
        gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
491
    },\
492
    {\
493
        gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
494
        gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
495
    },\
496
    {\
497
        X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
498
        X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
499
    },
500

    
501
static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
502
    DEF_ARITHC( )
503
};
504

    
505
static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
506
    DEF_ARITHC(_raw)
507
#ifndef CONFIG_USER_ONLY
508
    DEF_ARITHC(_kernel)
509
    DEF_ARITHC(_user)
510
#endif
511
};
512

    
513
/* Byte-size condition-code operation for each OP_* arithmetic op.
   Indexed by the OP_* enum.  Note the carry-aware ops map to the plain
   add/sub base values (ADC -> CC_OP_ADDB, SBB -> CC_OP_SUBB) —
   presumably the CC_OP_* enum is laid out so an offset is applied
   elsewhere; confirm against cpu.h. */
static const int cc_op_arithb[8] = {
    CC_OP_ADDB,   /* OP_ADDL */
    CC_OP_LOGICB, /* OP_ORL  */
    CC_OP_ADDB,   /* OP_ADCL */
    CC_OP_SUBB,   /* OP_SBBL */
    CC_OP_LOGICB, /* OP_ANDL */
    CC_OP_SUBB,   /* OP_SUBL */
    CC_OP_LOGICB, /* OP_XORL */
    CC_OP_SUBB,   /* OP_CMPL */
};
523

    
524
#define DEF_CMPXCHG(SUFFIX)\
525
    gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
526
    gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
527
    gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
528
    X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
529

    
530
static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
531
    DEF_CMPXCHG( )
532
};
533

    
534
static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
535
    DEF_CMPXCHG(_raw)
536
#ifndef CONFIG_USER_ONLY
537
    DEF_CMPXCHG(_kernel)
538
    DEF_CMPXCHG(_user)
539
#endif
540
};
541

    
542
#define DEF_SHIFT(SUFFIX)\
543
    {\
544
        gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
545
        gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
546
        gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
547
        gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
548
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
549
        gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
550
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
551
        gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
552
    },\
553
    {\
554
        gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
555
        gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
556
        gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
557
        gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
558
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
559
        gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
560
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
561
        gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
562
    },\
563
    {\
564
        gen_op_roll ## SUFFIX ## _T0_T1_cc,\
565
        gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
566
        gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
567
        gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
568
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
569
        gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
570
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
571
        gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
572
    },\
573
    {\
574
        X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
575
        X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
576
        X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
577
        X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
578
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
579
        X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
580
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
581
        X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
582
    },
583

    
584
static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
585
    DEF_SHIFT( )
586
};
587

    
588
static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
589
    DEF_SHIFT(_raw)
590
#ifndef CONFIG_USER_ONLY
591
    DEF_SHIFT(_kernel)
592
    DEF_SHIFT(_user)
593
#endif
594
};
595

    
596
#define DEF_SHIFTD(SUFFIX, op)\
597
    {\
598
        NULL,\
599
        NULL,\
600
    },\
601
    {\
602
        gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
603
        gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
604
     },\
605
    {\
606
        gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
607
        gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
608
    },\
609
    {\
610
X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
611
           gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
612
    },
613

    
614
static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
615
    DEF_SHIFTD(, im)
616
};
617

    
618
static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
619
    DEF_SHIFTD(, ECX)
620
};
621

    
622
static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
623
    DEF_SHIFTD(_raw, im)
624
#ifndef CONFIG_USER_ONLY
625
    DEF_SHIFTD(_kernel, im)
626
    DEF_SHIFTD(_user, im)
627
#endif
628
};
629

    
630
static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
631
    DEF_SHIFTD(_raw, ECX)
632
#ifndef CONFIG_USER_ONLY
633
    DEF_SHIFTD(_kernel, ECX)
634
    DEF_SHIFTD(_user, ECX)
635
#endif
636
};
637

    
638
static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
639
    [0] = {
640
        gen_op_btw_T0_T1_cc,
641
        gen_op_btsw_T0_T1_cc,
642
        gen_op_btrw_T0_T1_cc,
643
        gen_op_btcw_T0_T1_cc,
644
    },
645
    [1] = {
646
        gen_op_btl_T0_T1_cc,
647
        gen_op_btsl_T0_T1_cc,
648
        gen_op_btrl_T0_T1_cc,
649
        gen_op_btcl_T0_T1_cc,
650
    },
651
#ifdef TARGET_X86_64
652
    [2] = {
653
        gen_op_btq_T0_T1_cc,
654
        gen_op_btsq_T0_T1_cc,
655
        gen_op_btrq_T0_T1_cc,
656
        gen_op_btcq_T0_T1_cc,
657
    },
658
#endif
659
};
660

    
661
static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
662
    gen_op_add_bitw_A0_T1,
663
    gen_op_add_bitl_A0_T1,
664
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
665
};
666

    
667
static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
668
    [0] = {
669
        gen_op_bsfw_T0_cc,
670
        gen_op_bsrw_T0_cc,
671
    },
672
    [1] = {
673
        gen_op_bsfl_T0_cc,
674
        gen_op_bsrl_T0_cc,
675
    },
676
#ifdef TARGET_X86_64
677
    [2] = {
678
        gen_op_bsfq_T0_cc,
679
        gen_op_bsrq_T0_cc,
680
    },
681
#endif
682
};
683

    
684
static GenOpFunc *gen_op_lds_T0_A0[3 * 4] = {
685
    gen_op_ldsb_raw_T0_A0,
686
    gen_op_ldsw_raw_T0_A0,
687
    X86_64_ONLY(gen_op_ldsl_raw_T0_A0),
688
    NULL,
689
#ifndef CONFIG_USER_ONLY
690
    gen_op_ldsb_kernel_T0_A0,
691
    gen_op_ldsw_kernel_T0_A0,
692
    X86_64_ONLY(gen_op_ldsl_kernel_T0_A0),
693
    NULL,
694

    
695
    gen_op_ldsb_user_T0_A0,
696
    gen_op_ldsw_user_T0_A0,
697
    X86_64_ONLY(gen_op_ldsl_user_T0_A0),
698
    NULL,
699
#endif
700
};
701

    
702
static GenOpFunc *gen_op_ldu_T0_A0[3 * 4] = {
703
    gen_op_ldub_raw_T0_A0,
704
    gen_op_lduw_raw_T0_A0,
705
    NULL,
706
    NULL,
707

    
708
#ifndef CONFIG_USER_ONLY
709
    gen_op_ldub_kernel_T0_A0,
710
    gen_op_lduw_kernel_T0_A0,
711
    NULL,
712
    NULL,
713

    
714
    gen_op_ldub_user_T0_A0,
715
    gen_op_lduw_user_T0_A0,
716
    NULL,
717
    NULL,
718
#endif
719
};
720

    
721
/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
722
static GenOpFunc *gen_op_ld_T0_A0[3 * 4] = {
723
    gen_op_ldub_raw_T0_A0,
724
    gen_op_lduw_raw_T0_A0,
725
    gen_op_ldl_raw_T0_A0,
726
    X86_64_ONLY(gen_op_ldq_raw_T0_A0),
727

    
728
#ifndef CONFIG_USER_ONLY
729
    gen_op_ldub_kernel_T0_A0,
730
    gen_op_lduw_kernel_T0_A0,
731
    gen_op_ldl_kernel_T0_A0,
732
    X86_64_ONLY(gen_op_ldq_kernel_T0_A0),
733

    
734
    gen_op_ldub_user_T0_A0,
735
    gen_op_lduw_user_T0_A0,
736
    gen_op_ldl_user_T0_A0,
737
    X86_64_ONLY(gen_op_ldq_user_T0_A0),
738
#endif
739
};
740

    
741
static GenOpFunc *gen_op_ld_T1_A0[3 * 4] = {
742
    gen_op_ldub_raw_T1_A0,
743
    gen_op_lduw_raw_T1_A0,
744
    gen_op_ldl_raw_T1_A0,
745
    X86_64_ONLY(gen_op_ldq_raw_T1_A0),
746

    
747
#ifndef CONFIG_USER_ONLY
748
    gen_op_ldub_kernel_T1_A0,
749
    gen_op_lduw_kernel_T1_A0,
750
    gen_op_ldl_kernel_T1_A0,
751
    X86_64_ONLY(gen_op_ldq_kernel_T1_A0),
752

    
753
    gen_op_ldub_user_T1_A0,
754
    gen_op_lduw_user_T1_A0,
755
    gen_op_ldl_user_T1_A0,
756
    X86_64_ONLY(gen_op_ldq_user_T1_A0),
757
#endif
758
};
759

    
760
static GenOpFunc *gen_op_st_T0_A0[3 * 4] = {
761
    gen_op_stb_raw_T0_A0,
762
    gen_op_stw_raw_T0_A0,
763
    gen_op_stl_raw_T0_A0,
764
    X86_64_ONLY(gen_op_stq_raw_T0_A0),
765

    
766
#ifndef CONFIG_USER_ONLY
767
    gen_op_stb_kernel_T0_A0,
768
    gen_op_stw_kernel_T0_A0,
769
    gen_op_stl_kernel_T0_A0,
770
    X86_64_ONLY(gen_op_stq_kernel_T0_A0),
771

    
772
    gen_op_stb_user_T0_A0,
773
    gen_op_stw_user_T0_A0,
774
    gen_op_stl_user_T0_A0,
775
    X86_64_ONLY(gen_op_stq_user_T0_A0),
776
#endif
777
};
778

    
779
static GenOpFunc *gen_op_st_T1_A0[3 * 4] = {
780
    NULL,
781
    gen_op_stw_raw_T1_A0,
782
    gen_op_stl_raw_T1_A0,
783
    X86_64_ONLY(gen_op_stq_raw_T1_A0),
784

    
785
#ifndef CONFIG_USER_ONLY
786
    NULL,
787
    gen_op_stw_kernel_T1_A0,
788
    gen_op_stl_kernel_T1_A0,
789
    X86_64_ONLY(gen_op_stq_kernel_T1_A0),
790

    
791
    NULL,
792
    gen_op_stw_user_T1_A0,
793
    gen_op_stl_user_T1_A0,
794
    X86_64_ONLY(gen_op_stq_user_T1_A0),
795
#endif
796
};
797

    
798
/* Emit code storing a new EIP/RIP immediate into the CPU state.
   On x86-64 the cheapest encoding that preserves the value is chosen:
   a 32-bit move when pc round-trips through uint32_t (zero extension),
   a sign-extending 64-bit move when it fits in int32_t, otherwise a
   full 64-bit immediate passed as two 32-bit halves. */
static inline void gen_jmp_im(target_ulong pc)
{
#ifdef TARGET_X86_64
    if (pc == (uint32_t)pc) {
        gen_op_movl_eip_im(pc);
    } else if (pc == (int32_t)pc) {
        gen_op_movq_eip_im(pc);
    } else {
        /* full 64-bit immediate: high half first, then low half */
        gen_op_movq_eip_im64(pc >> 32, pc);
    }
#else
    gen_op_movl_eip_im(pc);
#endif
}
812

    
813
/* Emit code computing the source address of a string instruction into
   A0: segment base (honouring a segment override prefix) plus ESI.
   s->aflag selects the addressing size: 2 = 64-bit (x86-64 only),
   1 = 32-bit, 0 = 16-bit (ESI masked to 16 bits). */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64-bit address: segment base only added when explicitly overridden */
        if (override >= 0) {
            gen_op_movq_A0_seg(offsetof(CPUX86State,segs[override].base));
            gen_op_addq_A0_reg_sN[0][R_ESI]();
        } else {
            gen_op_movq_A0_reg[R_ESI]();
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS; /* some segment base is non-zero: DS must be added in */
        if (override >= 0) {
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
            gen_op_addl_A0_reg_sN[0][R_ESI]();
        } else {
            gen_op_movl_A0_reg[R_ESI]();
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg[R_ESI]();
        gen_op_andl_A0_ffff(); /* 16-bit offset wraparound */
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
    }
}
847

    
848
/* Emit code computing the destination address of a string instruction
   into A0: ES base plus EDI.  The destination segment of string insns
   is always ES, so no override handling is needed here (contrast with
   gen_string_movl_A0_ESI).  s->aflag selects the addressing size. */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64-bit address: ES base ignored */
        gen_op_movq_A0_reg[R_EDI]();
    } else
#endif
    if (s->aflag) {
        /* 32-bit address: only add ES base when some base is non-zero */
        if (s->addseg) {
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
            gen_op_addl_A0_reg_sN[0][R_EDI]();
        } else {
            gen_op_movl_A0_reg[R_EDI]();
        }
    } else {
        /* 16-bit address: mask EDI and always add ES base */
        gen_op_movl_A0_reg[R_EDI]();
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
    }
}
868

    
869
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
870
    gen_op_movl_T0_Dshiftb,
871
    gen_op_movl_T0_Dshiftw,
872
    gen_op_movl_T0_Dshiftl,
873
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
874
};
875

    
876
static GenOpFunc1 *gen_op_jnz_ecx[3] = {
877
    gen_op_jnz_ecxw,
878
    gen_op_jnz_ecxl,
879
    X86_64_ONLY(gen_op_jnz_ecxq),
880
};
881

    
882
static GenOpFunc1 *gen_op_jz_ecx[3] = {
883
    gen_op_jz_ecxw,
884
    gen_op_jz_ecxl,
885
    X86_64_ONLY(gen_op_jz_ecxq),
886
};
887

    
888
static GenOpFunc *gen_op_dec_ECX[3] = {
889
    gen_op_decw_ECX,
890
    gen_op_decl_ECX,
891
    X86_64_ONLY(gen_op_decq_ECX),
892
};
893

    
894
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
895
    {
896
        gen_op_jnz_subb,
897
        gen_op_jnz_subw,
898
        gen_op_jnz_subl,
899
        X86_64_ONLY(gen_op_jnz_subq),
900
    },
901
    {
902
        gen_op_jz_subb,
903
        gen_op_jz_subw,
904
        gen_op_jz_subl,
905
        X86_64_ONLY(gen_op_jz_subq),
906
    },
907
};
908

    
909
static GenOpFunc *gen_op_in_DX_T0[3] = {
910
    gen_op_inb_DX_T0,
911
    gen_op_inw_DX_T0,
912
    gen_op_inl_DX_T0,
913
};
914

    
915
static GenOpFunc *gen_op_out_DX_T0[3] = {
916
    gen_op_outb_DX_T0,
917
    gen_op_outw_DX_T0,
918
    gen_op_outl_DX_T0,
919
};
920

    
921
static GenOpFunc *gen_op_in[3] = {
922
    gen_op_inb_T0_T1,
923
    gen_op_inw_T0_T1,
924
    gen_op_inl_T0_T1,
925
};
926

    
927
static GenOpFunc *gen_op_out[3] = {
928
    gen_op_outb_T0_T1,
929
    gen_op_outw_T0_T1,
930
    gen_op_outl_T0_T1,
931
};
932

    
933
static GenOpFunc *gen_check_io_T0[3] = {
934
    gen_op_check_iob_T0,
935
    gen_op_check_iow_T0,
936
    gen_op_check_iol_T0,
937
};
938

    
939
static GenOpFunc *gen_check_io_DX[3] = {
940
    gen_op_check_iob_DX,
941
    gen_op_check_iow_DX,
942
    gen_op_check_iol_DX,
943
};
944

    
945
/* Emit an I/O permission check for the current instruction when CPU
 * state requires one: protected mode with CPL > IOPL, or vm86 mode.
 * ot selects the access width (byte/word/long); use_dx picks the check
 * that takes the port from DX, otherwise the port comes from T0.
 * cur_eip is committed to EIP first so a fault raised by the check is
 * reported at the right instruction. */
static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
{
    /* No dynamic check needed outside the privileged/vm86 cases. */
    if (!(s->pe && (s->cpl > s->iopl || s->vm86)))
        return;

    /* Flush the lazily tracked condition-code state before a potential fault. */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    if (use_dx)
        gen_check_io_DX[ot]();
    else
        gen_check_io_T0[ot]();
}
957

    
958
/* Emit one MOVS iteration: load an ot-sized element from [seg:ESI],
   store it to [ES:EDI], then advance ESI and EDI by the step loaded
   from gen_op_movl_T0_Dshift[ot] (presumably +/- element size per the
   direction flag — see the Dshift op definitions).  Register update
   width follows s->aflag. */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();  /* T0 = direction-flag step */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
979

    
980
/* Flush the lazily tracked condition-code operation into the CPU state
 * and mark it dynamic, so subsequent code computes flags at runtime. */
static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op == CC_OP_DYNAMIC)
        return;
    gen_op_set_cc_op(s->cc_op);
    s->cc_op = CC_OP_DYNAMIC;
}
987

    
988
/* XXX: does not work with gdbstub "ice" single step - not a
989
   serious problem */
990
/* Emit the "exit when ECX == 0" test used by REP-prefixed string
   instructions: if ECX is non-zero, skip over a jump to the next
   instruction.  Returns the label (l2) in front of that exit jump so
   callers can branch back to it after one iteration. */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1); /* ECX != 0: continue with the iteration */
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);   /* ECX == 0: jump past the string insn */
    gen_set_label(l1);
    return l2;
}
1002

    
1003
/* Emit one STOS iteration: store the low ot-sized part of EAX (loaded
   as a full register into T0) to [ES:EDI], then advance EDI by the
   direction-flag step. */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX](); /* T0 = EAX; store op selects width */
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1020

    
1021
/* Emit one LODS iteration: load an ot-sized element from [seg:ESI]
   into T0, write it to EAX at width ot, then advance ESI by the
   direction-flag step. */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_op_mov_reg_T0[ot][R_EAX]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1038

    
1039
/* Emit one SCAS iteration: compare EAX (in T0) against the element at
   [ES:EDI] (loaded into T1), setting condition codes via the compare
   op, then advance EDI by the direction-flag step. */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX](); /* T0 = EAX */
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0[ot + s->mem_index]();
    gen_op_cmpl_T0_T1_cc(); /* sets CC state for the caller to record */
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1057

    
1058
/* Emit one CMPS iteration: load [seg:ESI] into T0 and [ES:EDI] into
   T1, compare them (setting condition codes), then advance both ESI
   and EDI by the direction-flag step. */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0[ot + s->mem_index]();
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1080

    
1081
/* Emit one INS iteration: read an ot-sized value from the I/O port in
   DX and store it to [ES:EDI], then advance EDI by the direction-flag
   step. */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    /* Store a zero to [EDI] before touching the port — presumably so
       any write fault is raised before the I/O access has side
       effects; TODO confirm against the original commit. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_in_DX_T0[ot]();
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1100

    
1101
/* Emit one OUTS iteration: load the operand at [ESI], write it to port
   DX, then step ESI by the direction-flag-dependent increment. */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();  /* T0 = [ESI] */
    gen_op_out_DX_T0[ot]();                /* port DX <- T0 */
    gen_op_movl_T0_Dshift[ot]();           /* T0 = +/- operand size per DF */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1118

    
1119
/* same method as Valgrind : we generate jumps to current or next
   instruction */
/* GEN_REPZ(op): define gen_repz_<op>(), the REP-prefixed form of a
   string instruction.  The emitted code jumps past the loop when ECX is
   zero, runs one iteration of gen_<op>(), decrements ECX, and jumps
   back to cur_eip so the next iteration is re-dispatched (this keeps
   single-stepping accurate: one exception per iteration). */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
1136

    
1137
/* GEN_REPZ2(op): like GEN_REPZ but for string ops that also terminate
   on the zero flag (SCAS/CMPS under REPZ/REPNZ).  'nz' selects which
   ZF polarity ends the loop; the flags are forced to CC_OP_SUB<size>
   so the ZF test on the stored result is valid. */
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);\
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
1154

    
1155
/* Instantiate the REP-prefixed helpers.  MOVS/STOS/LODS/INS/OUTS only
   loop on ECX; SCAS/CMPS additionally terminate on ZF (GEN_REPZ2). */
GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
1162

    
1163
/* Condition-code test kinds, matching bits 3..1 of the Jcc/SETcc
   opcodes; the low opcode bit (handled by callers) inverts the test.
   These values index gen_jcc_sub, gen_setcc_slow and gen_setcc_sub. */
enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};
1173

    
1174
/* Fast conditional-jump generators, indexed [operand size][JCC_*], used
   when the flags come from a pending subtraction (CC_OP_SUB*).  NULL
   entries (and BUGGY_64 slots on x86-64) force the slow setcc path. */
static GenOpFunc1 *gen_jcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_jb_subb,
        gen_op_jz_subb,
        gen_op_jbe_subb,
        gen_op_js_subb,
        NULL,
        gen_op_jl_subb,
        gen_op_jle_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_jb_subw,
        gen_op_jz_subw,
        gen_op_jbe_subw,
        gen_op_js_subw,
        NULL,
        gen_op_jl_subw,
        gen_op_jle_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_jb_subl,
        gen_op_jz_subl,
        gen_op_jbe_subl,
        gen_op_js_subl,
        NULL,
        gen_op_jl_subl,
        gen_op_jle_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        BUGGY_64(gen_op_jb_subq),
        gen_op_jz_subq,
        BUGGY_64(gen_op_jbe_subq),
        gen_op_js_subq,
        NULL,
        BUGGY_64(gen_op_jl_subq),
        BUGGY_64(gen_op_jle_subq),
    },
#endif
};
1218
/* LOOP-family jump generators, indexed [address size (w/l/q)][variant]:
   0 = LOOPNZ, 1 = LOOPZ, 2 = jump-if-ECX-nonzero.  Slot 3 of each row
   is left NULL — presumably the remaining variant (JCXZ) is handled by
   the caller; verify at the use site. */
static GenOpFunc1 *gen_op_loop[3][4] = {
    [0] = {
        gen_op_loopnzw,
        gen_op_loopzw,
        gen_op_jnz_ecxw,
    },
    [1] = {
        gen_op_loopnzl,
        gen_op_loopzl,
        gen_op_jnz_ecxl,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_loopnzq,
        gen_op_loopzq,
        gen_op_jnz_ecxq,
    },
#endif
};
1237

    
1238
/* Slow-path condition evaluators, indexed by JCC_*: each computes the
   condition into T0 from the full (dynamic) eflags state.  Complete for
   all eight tests, so it works whatever the current cc_op is. */
static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};
1248

    
1249
/* Fast SETcc generators, indexed [operand size][JCC_*], valid when the
   flags come from a pending subtraction (CC_OP_SUB*).  NULL entries
   (JCC_O, JCC_P) fall back to gen_setcc_slow. */
static GenOpFunc *gen_setcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        NULL,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        NULL,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        NULL,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        NULL,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,
    },
#endif
};
1293

    
1294
/* x87 ST0-op-FT0 generators, indexed by the 3-bit reg field of the FPU
   arithmetic opcodes.  Slots 2 and 3 (FCOM/FCOMP) share the compare op;
   the pop, if any, is handled by the caller. */
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
    gen_op_fadd_ST0_FT0,
    gen_op_fmul_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fsub_ST0_FT0,
    gen_op_fsubr_ST0_FT0,
    gen_op_fdiv_ST0_FT0,
    gen_op_fdivr_ST0_FT0,
};
1304

    
1305
/* NOTE the exception in "r" op ordering */
/* x87 STn-op-ST0 generators: in this encoding direction the "reversed"
   ops trade places with the plain ones (sub<->subr, div<->divr), hence
   the swapped entries relative to gen_op_fp_arith_ST0_FT0.  Compare
   slots are NULL — FCOM does not take this form. */
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
    gen_op_fadd_STN_ST0,
    gen_op_fmul_STN_ST0,
    NULL,
    NULL,
    gen_op_fsubr_STN_ST0,
    gen_op_fsub_STN_ST0,
    gen_op_fdivr_STN_ST0,
    gen_op_fdiv_STN_ST0,
};
1316

    
1317
/* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit code for a two-operand ALU instruction 'op' of size 'ot'.  The
   left operand/destination is register 'd' (or memory via A0 when d is
   OR_TMP0); the right operand is expected in T1.  Tracks the lazy
   condition-code state in s1->cc_op. */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    GenOpFunc *gen_update_cc;

    /* load the destination operand into T0 */
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg[ot][0][d]();
    } else {
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    }
    switch(op) {
    case OP_ADCL:
    case OP_SBBL:
        /* ADC/SBB consume the current carry: flush the lazy flags first,
           and the combined op leaves cc_op dynamic. */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        if (d != OR_TMP0) {
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
            gen_op_mov_reg_T0[ot][d]();
        } else {
            /* memory form does load-op-store in a single op */
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
        }
        s1->cc_op = CC_OP_DYNAMIC;
        goto the_end;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        s1->cc_op = CC_OP_ADDB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    case OP_SUBL:
        gen_op_subl_T0_T1();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    default:
    case OP_ANDL:
    case OP_ORL:
    case OP_XORL:
        gen_op_arith_T0_T1_cc[op]();
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_CMPL:
        /* CMP sets flags only; no writeback, no deferred cc update */
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = NULL;
        break;
    }
    if (op != OP_CMPL) {
        if (d != OR_TMP0)
            gen_op_mov_reg_T0[ot][d]();
        else
            gen_op_st_T0_A0[ot + s1->mem_index]();
    }
    /* the flags update must happen after the memory write (precise
       exception support) */
    if (gen_update_cc)
        gen_update_cc();
 the_end: ;
}
1376

    
1377
/* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit code for INC (c > 0) or DEC (c <= 0) of size 'ot' on register
   'd' or on memory via A0.  INC/DEC preserve CF, so the previous flags
   are flushed first and a dedicated cc_op/update op pair is used. */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg[ot][0][d]();
    else
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    /* CF must survive: materialize the pending flags before the op */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        gen_op_incl_T0();
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        gen_op_decl_T0();
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0[ot][d]();
    else
        gen_op_st_T0_A0[ot + s1->mem_index]();
    gen_op_update_inc_cc();
}
1399

    
1400
/* Emit code for a shift/rotate 'op' of size 'ot': destination is
   register 'd' (or memory via A0 when d is OR_TMP0), count comes from
   register 's' (or is already in T1 when s is OR_TMP1). */
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg[ot][0][d]();
    else
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    if (s != OR_TMP1)
        gen_op_mov_TN_reg[ot][1][s]();
    /* for zero counts, flags are not updated, so must do it dynamically */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);

    if (d != OR_TMP0)
        gen_op_shift_T0_T1_cc[ot][op]();
    else
        gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
    if (d != OR_TMP0)
        gen_op_mov_reg_T0[ot][d]();
    s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1420

    
1421
/* Emit a shift/rotate by an immediate count 'c': loads the count into
   T1 and reuses the variable-count path. */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
1427

    
1428
/* Decode a ModRM memory operand and emit code leaving its effective
   address in A0.  Handles 16-bit and 32/64-bit addressing, SIB bytes,
   RIP-relative addressing in 64-bit mode, and segment-base addition
   when an override prefix or s->addseg requires it.  Advances s->pc
   past all consumed displacement/SIB bytes.  The out-parameters are
   vestigial: *reg_ptr is always set to OR_A0 and *offset_ptr to 0. */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;  /* explicit segment prefix always applies */
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32-bit or 64-bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            /* rm == 4 selects a SIB byte */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base register: 32-bit displacement only */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    /* 64-bit mode without SIB: RIP-relative */
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg[base]();
                if (disp != 0) {
                    if ((int32_t)disp == disp)
                        gen_op_addq_A0_im(disp);
                    else
                        gen_op_addq_A0_im64(disp >> 32, disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg[base]();
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
            /* displacement-only (or RIP-relative) address */
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                if ((int32_t)disp == disp)
                    gen_op_movq_A0_im(disp);
                else
                    gen_op_movq_A0_im64(disp >> 32, disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN[scale][index]();
            } else
#endif
            {
                gen_op_addl_A0_reg_sN[scale][index]();
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* default segment: SS for EBP/ESP-based, else DS */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
            } else
#endif
            {
                gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
            }
        }
    } else {
        /* 16-bit addressing: fixed base/index register pairs per rm */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* rm == 6 with mod 0: direct 16-bit address */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 1:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 2:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 3:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 4:
            gen_op_movl_A0_reg[R_ESI]();
            break;
        case 5:
            gen_op_movl_A0_reg[R_EDI]();
            break;
        case 6:
            gen_op_movl_A0_reg[R_EBP]();
            break;
        default:
        case 7:
            gen_op_movl_A0_reg[R_EBX]();
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();  /* 16-bit wrap of the effective address */
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* EBP-based forms default to SS */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
1617

    
1618
/* Skip over a ModRM memory operand without generating any code:
   advances s->pc past the SIB/displacement bytes exactly as
   gen_lea_modrm would, for instructions that ignore their operand
   (e.g. multi-byte NOP/hint forms). */
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
    if (mod == 3)
        return;  /* register operand: nothing to skip */
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing */
        base = rm;

        if (base == 4) {
            /* SIB byte present */
            code = ldub_code(s->pc++);
            base = (code & 7);
        }

        switch (mod) {
        case 0:
            if (base == 5) {
                s->pc += 4;  /* disp32 only */
            }
            break;
        case 1:
            s->pc++;         /* disp8 */
            break;
        default:
        case 2:
            s->pc += 4;      /* disp32 */
            break;
        }
    } else {
        /* 16-bit addressing */
        switch (mod) {
        case 0:
            if (rm == 6) {
                s->pc += 2;  /* direct disp16 */
            }
            break;
        case 1:
            s->pc++;         /* disp8 */
            break;
        default:
        case 2:
            s->pc += 2;      /* disp16 */
            break;
        }
    }
}
1667

    
1668
/* used for LEA and MOV AX, mem */
1669
static void gen_add_A0_ds_seg(DisasContext *s)
1670
{
1671
    int override, must_add_seg;
1672
    must_add_seg = s->addseg;
1673
    override = R_DS;
1674
    if (s->override >= 0) {
1675
        override = s->override;
1676
        must_add_seg = 1;
1677
    } else {
1678
        override = R_DS;
1679
    }
1680
    if (must_add_seg) {
1681
#ifdef TARGET_X86_64
1682
        if (CODE64(s)) {
1683
            gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
1684
        } else
1685
#endif
1686
        {
1687
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1688
        }
1689
    }
1690
}
1691

    
1692
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
   OR_TMP0 */
/* For mod == 3 the operand is a register and the transfer goes through
   T0; otherwise the effective address is computed into A0 and a memory
   load/store of size 'ot' is emitted.  'is_store' selects direction. */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        /* register <-> register via T0 */
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_mov_reg_T0[ot][rm]();
        } else {
            gen_op_mov_TN_reg[ot][0][rm]();
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0[ot][reg]();
        }
    } else {
        /* memory operand: address into A0, then load/store */
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_st_T0_A0[ot + s->mem_index]();
        } else {
            gen_op_ld_T0_A0[ot + s->mem_index]();
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0[ot][reg]();
        }
    }
}
1723

    
1724
/* Fetch an immediate of size 'ot' from the instruction stream at s->pc
   and advance s->pc past it.  OT_QUAD falls into the OT_LONG default:
   x86 immediates are at most 32 bits here. */
static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t ret;

    switch(ot) {
    case OT_BYTE:
        ret = ldub_code(s->pc);
        s->pc++;
        break;
    case OT_WORD:
        ret = lduw_code(s->pc);
        s->pc += 2;
        break;
    default:
    case OT_LONG:
        ret = ldl_code(s->pc);
        s->pc += 4;
        break;
    }
    return ret;
}
1745

    
1746
static inline int insn_const_size(unsigned int ot)
1747
{
1748
    if (ot <= OT_LONG)
1749
        return 1 << ot;
1750
    else
1751
        return 4;
1752
}
1753

    
1754
/* Emit a jump to 'eip' from TB exit slot 'tb_num' (0 or 1).  If the
   target lies on one of the pages the current TB already spans, a
   direct (chainable) TB jump is generated; otherwise the new EIP is
   stored and the block ends normally. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))  {
        /* jump to same page: we can use a direct jump */
        if (tb_num == 0)
            gen_op_goto_tb0(TBPARAM(tb));
        else
            gen_op_goto_tb1(TBPARAM(tb));
        gen_jmp_im(eip);
        /* return (tb | tb_num) to the main loop for chaining */
        gen_op_movl_T0_im((long)tb + tb_num);
        gen_op_exit_tb();
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
1778

    
1779
/* Emit code for a conditional jump 'b' (Jcc encoding): jump to 'val'
   when the condition holds, otherwise continue at 'next_eip'.  When
   s->jmp_opt is set both targets are emitted as chainable TB exits;
   otherwise the chosen EIP is stored and the block ends.
   Fixes: removed the unused local 'tb' (it was assigned s->tb and never
   read); merged the JCC_Z/JCC_S switch arms, whose bodies were
   byte-identical. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;            /* low opcode bit inverts the condition */
    jcc_op = (b >> 1) & 7;  /* JCC_O .. JCC_LE */

    if (s->jmp_opt) {
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            switch(jcc_op) {
            case JCC_Z:
            case JCC_S:
                /* ZF/SF can be tested directly on the stored result;
                   (cc_op - CC_OP_ADDB) % 4 recovers the operand size */
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;  /* use the slow path below */
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            /* slow path: evaluate the condition into T0 and branch on it */
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        if (inv) {
            /* inverted condition: swap taken/not-taken targets */
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {
        /* no TB chaining: compute the condition, store the resulting
           EIP and end the block */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
1907

    
1908
/* Emit code computing condition 'b' (SETcc encoding) into T0 (0 or 1).
   Uses the fast subtraction-based table when the lazy flags allow it,
   otherwise falls back to the slow full-eflags evaluators.
   Fix: merged the JCC_Z/JCC_S switch arms, whose bodies were
   byte-identical. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;            /* low opcode bit inverts the condition */
    jcc_op = (b >> 1) & 7;  /* JCC_O .. JCC_LE */
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        switch(jcc_op) {
        case JCC_Z:
        case JCC_S:
            /* ZF/SF can be read directly off the stored result;
               (cc_op - CC_OP_ADDB) % 4 recovers the operand size */
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        /* materialize the lazy flags and use the generic evaluator */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();  /* invert the computed condition */
    }
}
1974

    
1975
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
/* In protected mode (non-vm86) the full descriptor load is emitted and
   may fault, so the current EIP must be up to date; in real/vm86 mode
   only the selector/base are updated.  Ends the TB (is_jmp = 3) when
   the load can change translation-relevant state. */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);  /* the load may raise an exception */
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
1997

    
1998
/* Load a (possibly 64-bit) immediate into T1 for the SVM intercept
   helpers; on 32-bit targets the value is truncated to 32 bits. */
#ifdef TARGET_X86_64
#define SVM_movq_T1_im(x) gen_op_movq_T1_im64((x) >> 32, x)
#else
#define SVM_movq_T1_im(x) gen_op_movl_T1_im(x)
#endif
2003

    
2004
/* Emit an SVM I/O-intercept check before an IN/OUT/INS/OUTS when the
   guest's IOIO protection intercept is active.  T1 receives the
   next-instruction address (for VMCB exit info), EIP is set to the
   start of the faulting instruction, and eflags are materialized.
   Always returns 0 (system emulation only; a no-op for user mode). */
static inline int
gen_svm_check_io(DisasContext *s, target_ulong pc_start, uint64_t type)
{
#if !defined(CONFIG_USER_ONLY)
    if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        SVM_movq_T1_im(s->pc - s->cs_base);
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_geneflags();
        gen_op_svm_check_intercept_io((uint32_t)(type >> 32), (uint32_t)type);
        s->cc_op = CC_OP_DYNAMIC;
        /* FIXME: maybe we could move the io intercept vector to the TB as well
                  so we know if this is an EOB or not ... let's assume it's not
                  for now. */
    }
#endif
    return 0;
}
2023

    
2024
static inline int svm_is_rep(int prefixes)
2025
{
2026
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2027
}
2028

    
2029
/* Emit an SVM intercept check for exit code 'type' with exit-info
   'param'.  Returns 1 when the emitted code unconditionally ends the TB
   (generic intercepts that gen_eob), 0 otherwise (CR/DR and MSR checks,
   where the interception outcome is only known at run time). */
static inline int
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint64_t type, uint64_t param)
{
    if(!(s->flags & (INTERCEPT_SVM_MASK)))
        /* no SVM activated */
        return 0;
    switch(type) {
        /* CRx and DRx reads/writes */
        case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);  /* EIP of intercepted insn */
            SVM_movq_T1_im(param);              /* exit info in T1 */
            gen_op_geneflags();
            gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
            /* this is a special case as we do not know if the interception occurs
               so we assume there was none */
            return 0;
        case SVM_EXIT_MSR:
            if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                    s->cc_op = CC_OP_DYNAMIC;
                }
                gen_jmp_im(pc_start - s->cs_base);
                SVM_movq_T1_im(param);
                gen_op_geneflags();
                gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
                /* this is a special case as we do not know if the interception occurs
                   so we assume there was none */
                return 0;
            }
            break;
        default:
            if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                    /* NOTE(review): this branch sets CC_OP_EFLAGS where the
                       two cases above set CC_OP_DYNAMIC — confirm whether
                       the asymmetry is intentional. */
                    s->cc_op = CC_OP_EFLAGS;
                }
                gen_jmp_im(pc_start - s->cs_base);
                SVM_movq_T1_im(param);
                gen_op_geneflags();
                gen_op_svm_vmexit(type >> 32, type);
                /* we can optimize this one so TBs don't get longer
                   than up to vmexit */
                gen_eob(s);
                return 1;
            }
    }
    return 0;
}
2083

    
2084
/* Convenience wrapper: SVM intercept check with no exit-info parameter. */
static inline int
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    return gen_svm_check_intercept_param(s, pc_start, type, 0);
}
2089

    
2090
/* Emit code adjusting ESP/RSP by 'addend' with the width implied by the
   current mode (64-bit, ss32, or 16-bit).  The common constants 2, 4
   and 8 use dedicated ops; anything else takes the immediate form. */
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        if (addend == 8)
            gen_op_addq_ESP_8();
        else
            gen_op_addq_ESP_im(addend);
    } else
#endif
    if (s->ss32) {
        if (addend == 2)
            gen_op_addl_ESP_2();
        else if (addend == 4)
            gen_op_addl_ESP_4();
        else
            gen_op_addl_ESP_im(addend);
    } else {
        /* 16-bit stack: only the low word of ESP is updated */
        if (addend == 2)
            gen_op_addw_ESP_2();
        else if (addend == 4)
            gen_op_addw_ESP_4();
        else
            gen_op_addw_ESP_im(addend);
    }
}
2116

    
2117
/* generate a push. It depends on ss32, addseg and dflag */
/* Emit code that pushes T0 onto the guest stack. */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        /* long mode: operand size is 8 bytes, or 2 with a 0x66 prefix
           (dflag == 0); no segment base is applied */
        gen_op_movq_A0_reg[R_ESP]();
        if (s->dflag) {
            gen_op_subq_A0_8();
            gen_op_st_T0_A0[OT_QUAD + s->mem_index]();
        } else {
            gen_op_subq_A0_2();
            gen_op_st_T0_A0[OT_WORD + s->mem_index]();
        }
        gen_op_movq_ESP_A0();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (!s->dflag)
            gen_op_subl_A0_2();
        else
            gen_op_subl_A0_4();
        if (s->ss32) {
            if (s->addseg) {
                /* save the new (unsegmented) ESP in T1 before adding
                   the SS base to the store address */
                gen_op_movl_T1_A0();
                gen_op_addl_A0_SS();
            }
        } else {
            /* 16-bit stack: wrap the offset, then linearize with SS */
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_SS();
        }
        /* s->dflag + 1 selects OT_WORD or OT_LONG */
        gen_op_st_T0_A0[s->dflag + 1 + s->mem_index]();
        if (s->ss32 && !s->addseg)
            gen_op_movl_ESP_A0();
        else
            gen_op_mov_reg_T1[s->ss32 + 1][R_ESP]();
    }
}
2156

    
2157
/* generate a push. It depends on ss32, addseg and dflag */
2158
/* slower version for T1, only used for call Ev */
2159
static void gen_push_T1(DisasContext *s)
2160
{
2161
#ifdef TARGET_X86_64
2162
    if (CODE64(s)) {
2163
        gen_op_movq_A0_reg[R_ESP]();
2164
        if (s->dflag) {
2165
            gen_op_subq_A0_8();
2166
            gen_op_st_T1_A0[OT_QUAD + s->mem_index]();
2167
        } else {
2168
            gen_op_subq_A0_2();
2169
            gen_op_st_T0_A0[OT_WORD + s->mem_index]();
2170
        }
2171
        gen_op_movq_ESP_A0();
2172
    } else
2173
#endif
2174
    {
2175
        gen_op_movl_A0_reg[R_ESP]();
2176
        if (!s->dflag)
2177
            gen_op_subl_A0_2();
2178
        else
2179
            gen_op_subl_A0_4();
2180
        if (s->ss32) {
2181
            if (s->addseg) {
2182
                gen_op_addl_A0_SS();
2183
            }
2184
        } else {
2185
            gen_op_andl_A0_ffff();
2186
            gen_op_addl_A0_SS();
2187
        }
2188
        gen_op_st_T1_A0[s->dflag + 1 + s->mem_index]();
2189

    
2190
        if (s->ss32 && !s->addseg)
2191
            gen_op_movl_ESP_A0();
2192
        else
2193
            gen_stack_update(s, (-2) << s->dflag);
2194
    }
2195
}
2196

    
2197
/* two step pop is necessary for precise exceptions */
/* Load the top of stack into T0 without modifying ESP; the caller
   performs gen_pop_update() once no fault can occur. */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        /* long mode: 8 bytes, or 2 with a 0x66 prefix (dflag == 0) */
        gen_op_movq_A0_reg[R_ESP]();
        gen_op_ld_T0_A0[(s->dflag ? OT_QUAD : OT_WORD) + s->mem_index]();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_SS();
        } else {
            /* 16-bit stack: wrap the offset, then linearize with SS */
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_SS();
        }
        /* s->dflag + 1 selects OT_WORD or OT_LONG */
        gen_op_ld_T0_A0[s->dflag + 1 + s->mem_index]();
    }
}
2218

    
2219
/* Second half of a pop: advance the stack pointer past the value that
   gen_pop_T0() already loaded. */
static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    /* long mode pops 8 bytes unless a 0x66 prefix forced dflag == 0 */
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
        return;
    }
#endif
    /* legacy modes (and 0x66-prefixed long mode): 2 or 4 bytes */
    gen_stack_update(s, 2 << s->dflag);
}
2230

    
2231
/* Compute the linear address of the stack top into A0 and keep the raw
   (unsegmented) offset in T1. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();  /* 16-bit stack: only SP is significant */
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
}
2240

    
2241
/* NOTE: wrap around in 16 bit not fully handled */
/* Emit code for PUSHA/PUSHAD: store all eight general registers. */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    /* A0 starts at the final (lowest) stack address; 16 or 32 bytes */
    gen_op_addl_A0_im(-16 <<  s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();  /* remember the new ESP value */
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    /* store registers 7 down to 0 at increasing addresses, so register 0
       (EAX) ends up at the highest address, matching PUSHA ordering */
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
        gen_op_st_T0_A0[OT_WORD + s->dflag + s->mem_index]();
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2259

    
2260
/* NOTE: wrap around in 16 bit not fully handled */
/* Emit code for POPA/POPAD: reload the general registers pushed by PUSHA. */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    gen_op_addl_T1_im(16 <<  s->dflag);  /* T1 = ESP after the pops */
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        /* i == 3 addresses register 7 - 3 == 4 (R_ESP): skip the load
           but still step past the saved slot */
        if (i != 3) {
            gen_op_ld_T0_A0[OT_WORD + s->dflag + s->mem_index]();
            gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
        }
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2281

    
2282
/* Emit code for the ENTER instruction: push EBP, optionally copy 'level'
   frame pointers, set the new frame pointer and reserve 'esp_addend'
   bytes of locals. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;  /* hardware masks the nesting level to 5 bits */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        /* NOTE(review): this uses the 32-bit ESP->A0 move even in long
           mode -- confirm whether the upper RSP bits matter here */
        gen_op_movl_A0_ESP();
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();

        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            gen_op_enter64_level(level, (ot == OT_QUAD));
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_QUAD][R_ESP]();
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_ESP();
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        gen_op_movl_T1_A0();  /* T1 = new frame pointer / base of locals */
        if (s->addseg)
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            /* copy the outer frame pointers (handled by a helper) */
            gen_op_enter_level(level, s->dflag);
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
    }
}
2329

    
2330
/* Emit code that raises exception 'trapno' at guest EIP 'cur_eip';
   flags are materialized first so the state is precise.  Ends the TB. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;  /* no more code generated in this TB */
}
2338

    
2339
/* an interrupt is different from an exception because of the
   privilege checks */
/* Emit code raising software interrupt 'intno'; the next-instruction
   offset is passed so the helper can push the correct return EIP. */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3;  /* no more code generated in this TB */
}
2350

    
2351
/* Emit code that stops execution at 'cur_eip' for the debugger. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;  /* no more code generated in this TB */
}
2359

    
2360
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        /* the interrupt-shadow (e.g. after MOV SS) ends with this insn */
        gen_op_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        gen_op_debug();          /* gdbstub single-step */
    } else if (s->tf) {
        gen_op_single_step();    /* guest TF trap */
    } else {
        /* normal exit: return 0 so no chained TB is selected */
        gen_op_movl_T0_0();
        gen_op_exit_tb();
    }
    s->is_jmp = 3;
}
2379

    
2380
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        /* direct block chaining is allowed: flags must be clean before
           the TB link is taken */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        /* fall back to a plain EIP update and generic block end */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2396

    
2397
/* Unconditional jump to 'eip' using TB chaining slot 0. */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2401

    
2402
/* Load the target_ulong immediate 'val' into T0.  On x86-64 the full
   64-bit move is only emitted when 'val' does not survive a
   sign-extended 32-bit round trip. */
static void gen_movtl_T0_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val != val) {
        gen_op_movq_T0_im64(val >> 32, val);
    } else {
        gen_op_movl_T0_im(val);
    }
#else
    gen_op_movl_T0_im(val);
#endif
}
2414

    
2415
/* Load the target_ulong immediate 'val' into T1.  On x86-64 the full
   64-bit move is only emitted when 'val' does not survive a
   sign-extended 32-bit round trip. */
static void gen_movtl_T1_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val != val) {
        gen_op_movq_T1_im64(val >> 32, val);
    } else {
        gen_op_movl_T1_im(val);
    }
#else
    gen_op_movl_T1_im(val);
#endif
}
2427

    
2428
/* Add the immediate 'val' to the address register A0, using the 64-bit
   add in long mode and the 32-bit add otherwise. */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
2437

    
2438
/* 64-bit load from guest address A0 into a CPUX86State field (given as
   an offset); indexed by s->mem_index >> 2 (raw/kernel/user access). */
static GenOpFunc1 *gen_ldq_env_A0[3] = {
    gen_op_ldq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldq_kernel_env_A0,
    gen_op_ldq_user_env_A0,
#endif
};
2445

    
2446
/* 64-bit store of a CPUX86State field (given as an offset) to guest
   address A0; indexed by s->mem_index >> 2 (raw/kernel/user access). */
static GenOpFunc1 *gen_stq_env_A0[3] = {
    gen_op_stq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_stq_kernel_env_A0,
    gen_op_stq_user_env_A0,
#endif
};
2453

    
2454
/* 128-bit (octa) load from guest address A0 into a CPUX86State field
   (given as an offset); indexed by s->mem_index >> 2. */
static GenOpFunc1 *gen_ldo_env_A0[3] = {
    gen_op_ldo_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldo_kernel_env_A0,
    gen_op_ldo_user_env_A0,
#endif
};
2461

    
2462
/* 128-bit (octa) store of a CPUX86State field (given as an offset) to
   guest address A0; indexed by s->mem_index >> 2. */
static GenOpFunc1 *gen_sto_env_A0[3] = {
    gen_op_sto_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_sto_kernel_env_A0,
    gen_op_sto_user_env_A0,
#endif
};
2469

    
2470
/* Sentinel table entry: the opcode needs bespoke handling in gen_sse()
   instead of a plain two-operand micro-op. */
#define SSE_SPECIAL ((GenOpFunc2 *)1)

/* Expand to the { mmx, xmm } micro-op pair for one opcode. */
#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
/* Expand to the { ps, pd, ss, sd } variants of an SSE FP operation. */
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
2475

    
2476
/* Main 0x0F two-byte opcode dispatch table for MMX/SSE, indexed by
   [opcode][prefix] where the prefix index is 0 = none, 1 = 0x66,
   2 = 0xF3 (REPZ), 3 = 0xF2 (REPNZ) -- see the b1 computation in
   gen_sse().  NULL entries are illegal encodings; SSE_SPECIAL entries
   are decoded by hand in gen_sse(). */
static GenOpFunc2 *sse_op_table1[256][4] = {
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
    [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
    [0x2f] = { gen_op_comiss, gen_op_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
    [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
    [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
    [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
    [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
               gen_op_cvtss2sd, gen_op_cvtsd2ss },
    [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    /* the cmpeq entry is a placeholder: the real predicate comes from
       the immediate byte via sse_op_table4 */
    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
    [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movqdu */
    [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
               (GenOpFunc2 *)gen_op_pshufd_xmm,
               (GenOpFunc2 *)gen_op_pshufhw_xmm,
               (GenOpFunc2 *)gen_op_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_SPECIAL }, /* emms */
    [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
    [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2598

    
2599
/* Immediate-count shift group: rows of 8 entries correspond (judging by
   the op names -- verify against gen_sse()) to word/dword/qword shifts
   (opcodes 0x71/0x72/0x73), indexed within a row by the modrm /reg
   field; the second index selects mmx (0) vs xmm (1). */
static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, gen_op_psrldq_xmm },
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, gen_op_pslldq_xmm },
};
2611

    
2612
/* Scalar integer <-> float conversions, in three rows of four:
   int-to-float (cvtsi2ss/sd and 64-bit cvtsq2ss/sd), truncating
   float-to-int (cvtt*), and rounding float-to-int (cvt*).  The 64-bit
   variants are NULL on 32-bit targets (X86_64_ONLY). */
static GenOpFunc1 *sse_op_table3[4 * 3] = {
    gen_op_cvtsi2ss,
    gen_op_cvtsi2sd,
    X86_64_ONLY(gen_op_cvtsq2ss),
    X86_64_ONLY(gen_op_cvtsq2sd),

    gen_op_cvttss2si,
    gen_op_cvttsd2si,
    X86_64_ONLY(gen_op_cvttss2sq),
    X86_64_ONLY(gen_op_cvttsd2sq),

    gen_op_cvtss2si,
    gen_op_cvtsd2si,
    X86_64_ONLY(gen_op_cvtss2sq),
    X86_64_ONLY(gen_op_cvtsd2sq),
};
2628

    
2629
/* CMPPS/CMPPD/CMPSS/CMPSD predicate table: the row is the comparison
   predicate encoded in the instruction's immediate byte (0..7), each
   row holding the { ps, pd, ss, sd } variants via SSE_FOP. */
static GenOpFunc2 *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2639

    
2640
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2641
{
2642
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
2643
    int modrm, mod, rm, reg, reg_addr, offset_addr;
2644
    GenOpFunc2 *sse_op2;
2645
    GenOpFunc3 *sse_op3;
2646

    
2647
    b &= 0xff;
2648
    if (s->prefix & PREFIX_DATA)
2649
        b1 = 1;
2650
    else if (s->prefix & PREFIX_REPZ)
2651
        b1 = 2;
2652
    else if (s->prefix & PREFIX_REPNZ)
2653
        b1 = 3;
2654
    else
2655
        b1 = 0;
2656
    sse_op2 = sse_op_table1[b][b1];
2657
    if (!sse_op2)
2658
        goto illegal_op;
2659
    if (b <= 0x5f || b == 0xc6 || b == 0xc2) {
2660
        is_xmm = 1;
2661
    } else {
2662
        if (b1 == 0) {
2663
            /* MMX case */
2664
            is_xmm = 0;
2665
        } else {
2666
            is_xmm = 1;
2667
        }
2668
    }
2669
    /* simple MMX/SSE operation */
2670
    if (s->flags & HF_TS_MASK) {
2671
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2672
        return;
2673
    }
2674
    if (s->flags & HF_EM_MASK) {
2675
    illegal_op:
2676
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2677
        return;
2678
    }
2679
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2680
        goto illegal_op;
2681
    if (b == 0x77) {
2682
        /* emms */
2683
        gen_op_emms();
2684
        return;
2685
    }
2686
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2687
       the static cpu state) */
2688
    if (!is_xmm) {
2689
        gen_op_enter_mmx();
2690
    }
2691

    
2692
    modrm = ldub_code(s->pc++);
2693
    reg = ((modrm >> 3) & 7);
2694
    if (is_xmm)
2695
        reg |= rex_r;
2696
    mod = (modrm >> 6) & 3;
2697
    if (sse_op2 == SSE_SPECIAL) {
2698
        b |= (b1 << 8);
2699
        switch(b) {
2700
        case 0x0e7: /* movntq */
2701
            if (mod == 3)
2702
                goto illegal_op;
2703
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2704
            gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2705
            break;
2706
        case 0x1e7: /* movntdq */
2707
        case 0x02b: /* movntps */
2708
        case 0x12b: /* movntps */
2709
        case 0x3f0: /* lddqu */
2710
            if (mod == 3)
2711
                goto illegal_op;
2712
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2713
            gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2714
            break;
2715
        case 0x6e: /* movd mm, ea */
2716
#ifdef TARGET_X86_64
2717
            if (s->dflag == 2) {
2718
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2719
                gen_op_movq_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2720
            } else
2721
#endif
2722
            {
2723
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2724
                gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2725
            }
2726
            break;
2727
        case 0x16e: /* movd xmm, ea */
2728
#ifdef TARGET_X86_64
2729
            if (s->dflag == 2) {
2730
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2731
                gen_op_movq_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2732
            } else
2733
#endif
2734
            {
2735
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2736
                gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2737
            }
2738
            break;
2739
        case 0x6f: /* movq mm, ea */
2740
            if (mod != 3) {
2741
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2742
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2743
            } else {
2744
                rm = (modrm & 7);
2745
                gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
2746
                            offsetof(CPUX86State,fpregs[rm].mmx));
2747
            }
2748
            break;
2749
        case 0x010: /* movups */
2750
        case 0x110: /* movupd */
2751
        case 0x028: /* movaps */
2752
        case 0x128: /* movapd */
2753
        case 0x16f: /* movdqa xmm, ea */
2754
        case 0x26f: /* movdqu xmm, ea */
2755
            if (mod != 3) {
2756
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2757
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2758
            } else {
2759
                rm = (modrm & 7) | REX_B(s);
2760
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2761
                            offsetof(CPUX86State,xmm_regs[rm]));
2762
            }
2763
            break;
2764
        case 0x210: /* movss xmm, ea */
2765
            if (mod != 3) {
2766
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2767
                gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2768
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2769
                gen_op_movl_T0_0();
2770
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2771
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2772
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2773
            } else {
2774
                rm = (modrm & 7) | REX_B(s);
2775
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2776
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2777
            }
2778
            break;
2779
        case 0x310: /* movsd xmm, ea */
2780
            if (mod != 3) {
2781
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2782
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2783
                gen_op_movl_T0_0();
2784
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2785
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2786
            } else {
2787
                rm = (modrm & 7) | REX_B(s);
2788
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2789
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2790
            }
2791
            break;
2792
        case 0x012: /* movlps */
2793
        case 0x112: /* movlpd */
2794
            if (mod != 3) {
2795
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2796
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2797
            } else {
2798
                /* movhlps */
2799
                rm = (modrm & 7) | REX_B(s);
2800
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2801
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2802
            }
2803
            break;
2804
        case 0x212: /* movsldup */
2805
            if (mod != 3) {
2806
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2807
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2808
            } else {
2809
                rm = (modrm & 7) | REX_B(s);
2810
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2811
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2812
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2813
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
2814
            }
2815
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2816
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2817
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2818
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2819
            break;
2820
        case 0x312: /* movddup */
2821
            if (mod != 3) {
2822
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2823
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2824
            } else {
2825
                rm = (modrm & 7) | REX_B(s);
2826
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2827
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2828
            }
2829
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2830
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2831
            break;
2832
        case 0x016: /* movhps */
2833
        case 0x116: /* movhpd */
2834
            if (mod != 3) {
2835
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2836
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2837
            } else {
2838
                /* movlhps */
2839
                rm = (modrm & 7) | REX_B(s);
2840
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2841
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2842
            }
2843
            break;
2844
        case 0x216: /* movshdup */
2845
            if (mod != 3) {
2846
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2847
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2848
            } else {
2849
                rm = (modrm & 7) | REX_B(s);
2850
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2851
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
2852
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2853
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
2854
            }
2855
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2856
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2857
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2858
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2859
            break;
2860
        case 0x7e: /* movd ea, mm */
2861
#ifdef TARGET_X86_64
2862
            if (s->dflag == 2) {
2863
                gen_op_movq_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2864
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2865
            } else
2866
#endif
2867
            {
2868
                gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2869
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2870
            }
2871
            break;
2872
        case 0x17e: /* movd ea, xmm */
2873
#ifdef TARGET_X86_64
2874
            if (s->dflag == 2) {
2875
                gen_op_movq_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2876
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2877
            } else
2878
#endif
2879
            {
2880
                gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2881
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2882
            }
2883
            break;
2884
        case 0x27e: /* movq xmm, ea */
2885
            if (mod != 3) {
2886
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2887
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2888
            } else {
2889
                rm = (modrm & 7) | REX_B(s);
2890
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2891
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2892
            }
2893
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2894
            break;
2895
        case 0x7f: /* movq ea, mm */
2896
            if (mod != 3) {
2897
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2898
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2899
            } else {
2900
                rm = (modrm & 7);
2901
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2902
                            offsetof(CPUX86State,fpregs[reg].mmx));
2903
            }
2904
            break;
2905
        case 0x011: /* movups */
2906
        case 0x111: /* movupd */
2907
        case 0x029: /* movaps */
2908
        case 0x129: /* movapd */
2909
        case 0x17f: /* movdqa ea, xmm */
2910
        case 0x27f: /* movdqu ea, xmm */
2911
            if (mod != 3) {
2912
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2913
                gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2914
            } else {
2915
                rm = (modrm & 7) | REX_B(s);
2916
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
2917
                            offsetof(CPUX86State,xmm_regs[reg]));
2918
            }
2919
            break;
2920
        case 0x211: /* movss ea, xmm */
2921
            if (mod != 3) {
2922
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2923
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2924
                gen_op_st_T0_A0[OT_LONG + s->mem_index]();
2925
            } else {
2926
                rm = (modrm & 7) | REX_B(s);
2927
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
2928
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2929
            }
2930
            break;
2931
        case 0x311: /* movsd ea, xmm */
2932
            if (mod != 3) {
2933
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2934
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2935
            } else {
2936
                rm = (modrm & 7) | REX_B(s);
2937
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2938
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2939
            }
2940
            break;
2941
        case 0x013: /* movlps */
2942
        case 0x113: /* movlpd */
2943
            if (mod != 3) {
2944
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2945
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2946
            } else {
2947
                goto illegal_op;
2948
            }
2949
            break;
2950
        case 0x017: /* movhps */
2951
        case 0x117: /* movhpd */
2952
            if (mod != 3) {
2953
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2954
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2955
            } else {
2956
                goto illegal_op;
2957
            }
2958
            break;
2959
        case 0x71: /* shift mm, im */
2960
        case 0x72:
2961
        case 0x73:
2962
        case 0x171: /* shift xmm, im */
2963
        case 0x172:
2964
        case 0x173:
2965
            val = ldub_code(s->pc++);
2966
            if (is_xmm) {
2967
                gen_op_movl_T0_im(val);
2968
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2969
                gen_op_movl_T0_0();
2970
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
2971
                op1_offset = offsetof(CPUX86State,xmm_t0);
2972
            } else {
2973
                gen_op_movl_T0_im(val);
2974
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
2975
                gen_op_movl_T0_0();
2976
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
2977
                op1_offset = offsetof(CPUX86State,mmx_t0);
2978
            }
2979
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
2980
            if (!sse_op2)
2981
                goto illegal_op;
2982
            if (is_xmm) {
2983
                rm = (modrm & 7) | REX_B(s);
2984
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2985
            } else {
2986
                rm = (modrm & 7);
2987
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2988
            }
2989
            sse_op2(op2_offset, op1_offset);
2990
            break;
2991
        case 0x050: /* movmskps */
2992
            rm = (modrm & 7) | REX_B(s);
2993
            gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
2994
            gen_op_mov_reg_T0[OT_LONG][reg]();
2995
            break;
2996
        case 0x150: /* movmskpd */
2997
            rm = (modrm & 7) | REX_B(s);
2998
            gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
2999
            gen_op_mov_reg_T0[OT_LONG][reg]();
3000
            break;
3001
        case 0x02a: /* cvtpi2ps */
3002
        case 0x12a: /* cvtpi2pd */
3003
            gen_op_enter_mmx();
3004
            if (mod != 3) {
3005
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3006
                op2_offset = offsetof(CPUX86State,mmx_t0);
3007
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
3008
            } else {
3009
                rm = (modrm & 7);
3010
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3011
            }
3012
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3013
            switch(b >> 8) {
3014
            case 0x0:
3015
                gen_op_cvtpi2ps(op1_offset, op2_offset);
3016
                break;
3017
            default:
3018
            case 0x1:
3019
                gen_op_cvtpi2pd(op1_offset, op2_offset);
3020
                break;
3021
            }
3022
            break;
3023
        case 0x22a: /* cvtsi2ss */
3024
        case 0x32a: /* cvtsi2sd */
3025
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3026
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3027
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3028
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
3029
            break;
3030
        case 0x02c: /* cvttps2pi */
3031
        case 0x12c: /* cvttpd2pi */
3032
        case 0x02d: /* cvtps2pi */
3033
        case 0x12d: /* cvtpd2pi */
3034
            gen_op_enter_mmx();
3035
            if (mod != 3) {
3036
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3037
                op2_offset = offsetof(CPUX86State,xmm_t0);
3038
                gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
3039
            } else {
3040
                rm = (modrm & 7) | REX_B(s);
3041
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3042
            }
3043
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3044
            switch(b) {
3045
            case 0x02c:
3046
                gen_op_cvttps2pi(op1_offset, op2_offset);
3047
                break;
3048
            case 0x12c:
3049
                gen_op_cvttpd2pi(op1_offset, op2_offset);
3050
                break;
3051
            case 0x02d:
3052
                gen_op_cvtps2pi(op1_offset, op2_offset);
3053
                break;
3054
            case 0x12d:
3055
                gen_op_cvtpd2pi(op1_offset, op2_offset);
3056
                break;
3057
            }
3058
            break;
3059
        case 0x22c: /* cvttss2si */
3060
        case 0x32c: /* cvttsd2si */
3061
        case 0x22d: /* cvtss2si */
3062
        case 0x32d: /* cvtsd2si */
3063
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3064
            if (mod != 3) {
3065
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3066
                if ((b >> 8) & 1) {
3067
                    gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3068
                } else {
3069
                    gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
3070
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3071
                }
3072
                op2_offset = offsetof(CPUX86State,xmm_t0);
3073
            } else {
3074
                rm = (modrm & 7) | REX_B(s);
3075
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3076
            }
3077
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3078
                          (b & 1) * 4](op2_offset);
3079
            gen_op_mov_reg_T0[ot][reg]();
3080
            break;
3081
        case 0xc4: /* pinsrw */
3082
        case 0x1c4:
3083
            s->rip_offset = 1;
3084
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3085
            val = ldub_code(s->pc++);
3086
            if (b1) {
3087
                val &= 7;
3088
                gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
3089
            } else {
3090
                val &= 3;
3091
                gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
3092
            }
3093
            break;
3094
        case 0xc5: /* pextrw */
3095
        case 0x1c5:
3096
            if (mod != 3)
3097
                goto illegal_op;
3098
            val = ldub_code(s->pc++);
3099
            if (b1) {
3100
                val &= 7;
3101
                rm = (modrm & 7) | REX_B(s);
3102
                gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
3103
            } else {
3104
                val &= 3;
3105
                rm = (modrm & 7);
3106
                gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
3107
            }
3108
            reg = ((modrm >> 3) & 7) | rex_r;
3109
            gen_op_mov_reg_T0[OT_LONG][reg]();
3110
            break;
3111
        case 0x1d6: /* movq ea, xmm */
3112
            if (mod != 3) {
3113
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3114
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3115
            } else {
3116
                rm = (modrm & 7) | REX_B(s);
3117
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3118
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3119
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3120
            }
3121
            break;
3122
        case 0x2d6: /* movq2dq */
3123
            gen_op_enter_mmx();
3124
            rm = (modrm & 7);
3125
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3126
                        offsetof(CPUX86State,fpregs[rm].mmx));
3127
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3128
            break;
3129
        case 0x3d6: /* movdq2q */
3130
            gen_op_enter_mmx();
3131
            rm = (modrm & 7) | REX_B(s);
3132
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3133
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3134
            break;
3135
        case 0xd7: /* pmovmskb */
3136
        case 0x1d7:
3137
            if (mod != 3)
3138
                goto illegal_op;
3139
            if (b1) {
3140
                rm = (modrm & 7) | REX_B(s);
3141
                gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
3142
            } else {
3143
                rm = (modrm & 7);
3144
                gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
3145
            }
3146
            reg = ((modrm >> 3) & 7) | rex_r;
3147
            gen_op_mov_reg_T0[OT_LONG][reg]();
3148
            break;
3149
        default:
3150
            goto illegal_op;
3151
        }
3152
    } else {
3153
        /* generic MMX or SSE operation */
3154
        switch(b) {
3155
        case 0xf7:
3156
            /* maskmov : we must prepare A0 */
3157
            if (mod != 3)
3158
                goto illegal_op;
3159
#ifdef TARGET_X86_64
3160
            if (s->aflag == 2) {
3161
                gen_op_movq_A0_reg[R_EDI]();
3162
            } else
3163
#endif
3164
            {
3165
                gen_op_movl_A0_reg[R_EDI]();
3166
                if (s->aflag == 0)
3167
                    gen_op_andl_A0_ffff();
3168
            }
3169
            gen_add_A0_ds_seg(s);
3170
            break;
3171
        case 0x70: /* pshufx insn */
3172
        case 0xc6: /* pshufx insn */
3173
        case 0xc2: /* compare insns */
3174
            s->rip_offset = 1;
3175
            break;
3176
        default:
3177
            break;
3178
        }
3179
        if (is_xmm) {
3180
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3181
            if (mod != 3) {
3182
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3183
                op2_offset = offsetof(CPUX86State,xmm_t0);
3184
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3185
                                b == 0xc2)) {
3186
                    /* specific case for SSE single instructions */
3187
                    if (b1 == 2) {
3188
                        /* 32 bit access */
3189
                        gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
3190
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3191
                    } else {
3192
                        /* 64 bit access */
3193
                        gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3194
                    }
3195
                } else {
3196
                    gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
3197
                }
3198
            } else {
3199
                rm = (modrm & 7) | REX_B(s);
3200
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3201
            }
3202
        } else {
3203
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3204
            if (mod != 3) {
3205
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3206
                op2_offset = offsetof(CPUX86State,mmx_t0);
3207
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
3208
            } else {
3209
                rm = (modrm & 7);
3210
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3211
            }
3212
        }
3213
        switch(b) {
3214
        case 0x70: /* pshufx insn */
3215
        case 0xc6: /* pshufx insn */
3216
            val = ldub_code(s->pc++);
3217
            sse_op3 = (GenOpFunc3 *)sse_op2;
3218
            sse_op3(op1_offset, op2_offset, val);
3219
            break;
3220
        case 0xc2:
3221
            /* compare insns */
3222
            val = ldub_code(s->pc++);
3223
            if (val >= 8)
3224
                goto illegal_op;
3225
            sse_op2 = sse_op_table4[val][b1];
3226
            sse_op2(op1_offset, op2_offset);
3227
            break;
3228
        default:
3229
            sse_op2(op1_offset, op2_offset);
3230
            break;
3231
        }
3232
        if (b == 0x2e || b == 0x2f) {
3233
            s->cc_op = CC_OP_EFLAGS;
3234
        }
3235
    }
3236
}
3237

    
3238

    
3239
/* convert one instruction. s->is_jmp is set if the translation must
3240
   be stopped. Return the next pc value */
3241
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3242
{
3243
    int b, prefixes, aflag, dflag;
3244
    int shift, ot;
3245
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3246
    target_ulong next_eip, tval;
3247
    int rex_w, rex_r;
3248

    
3249
    s->pc = pc_start;
3250
    prefixes = 0;
3251
    aflag = s->code32;
3252
    dflag = s->code32;
3253
    s->override = -1;
3254
    rex_w = -1;
3255
    rex_r = 0;
3256
#ifdef TARGET_X86_64
3257
    s->rex_x = 0;
3258
    s->rex_b = 0;
3259
    x86_64_hregs = 0;
3260
#endif
3261
    s->rip_offset = 0; /* for relative ip address */
3262
 next_byte:
3263
    b = ldub_code(s->pc);
3264
    s->pc++;
3265
    /* check prefixes */
3266
#ifdef TARGET_X86_64
3267
    if (CODE64(s)) {
3268
        switch (b) {
3269
        case 0xf3:
3270
            prefixes |= PREFIX_REPZ;
3271
            goto next_byte;
3272
        case 0xf2:
3273
            prefixes |= PREFIX_REPNZ;
3274
            goto next_byte;
3275
        case 0xf0:
3276
            prefixes |= PREFIX_LOCK;
3277
            goto next_byte;
3278
        case 0x2e:
3279
            s->override = R_CS;
3280
            goto next_byte;
3281
        case 0x36:
3282
            s->override = R_SS;
3283
            goto next_byte;
3284
        case 0x3e:
3285
            s->override = R_DS;
3286
            goto next_byte;
3287
        case 0x26:
3288
            s->override = R_ES;
3289
            goto next_byte;
3290
        case 0x64:
3291
            s->override = R_FS;
3292
            goto next_byte;
3293
        case 0x65:
3294
            s->override = R_GS;
3295
            goto next_byte;
3296
        case 0x66:
3297
            prefixes |= PREFIX_DATA;
3298
            goto next_byte;
3299
        case 0x67:
3300
            prefixes |= PREFIX_ADR;
3301
            goto next_byte;
3302
        case 0x40 ... 0x4f:
3303
            /* REX prefix */
3304
            rex_w = (b >> 3) & 1;
3305
            rex_r = (b & 0x4) << 1;
3306
            s->rex_x = (b & 0x2) << 2;
3307
            REX_B(s) = (b & 0x1) << 3;
3308
            x86_64_hregs = 1; /* select uniform byte register addressing */
3309
            goto next_byte;
3310
        }
3311
        if (rex_w == 1) {
3312
            /* 0x66 is ignored if rex.w is set */
3313
            dflag = 2;
3314
        } else {
3315
            if (prefixes & PREFIX_DATA)
3316
                dflag ^= 1;
3317
        }
3318
        if (!(prefixes & PREFIX_ADR))
3319
            aflag = 2;
3320
    } else
3321
#endif
3322
    {
3323
        switch (b) {
3324
        case 0xf3:
3325
            prefixes |= PREFIX_REPZ;
3326
            goto next_byte;
3327
        case 0xf2:
3328
            prefixes |= PREFIX_REPNZ;
3329
            goto next_byte;
3330
        case 0xf0:
3331
            prefixes |= PREFIX_LOCK;
3332
            goto next_byte;
3333
        case 0x2e:
3334
            s->override = R_CS;
3335
            goto next_byte;
3336
        case 0x36:
3337
            s->override = R_SS;
3338
            goto next_byte;
3339
        case 0x3e:
3340
            s->override = R_DS;
3341
            goto next_byte;
3342
        case 0x26:
3343
            s->override = R_ES;
3344
            goto next_byte;
3345
        case 0x64:
3346
            s->override = R_FS;
3347
            goto next_byte;
3348
        case 0x65:
3349
            s->override = R_GS;
3350
            goto next_byte;
3351
        case 0x66:
3352
            prefixes |= PREFIX_DATA;
3353
            goto next_byte;
3354
        case 0x67:
3355
            prefixes |= PREFIX_ADR;
3356
            goto next_byte;
3357
        }
3358
        if (prefixes & PREFIX_DATA)
3359
            dflag ^= 1;
3360
        if (prefixes & PREFIX_ADR)
3361
            aflag ^= 1;
3362
    }
3363

    
3364
    s->prefix = prefixes;
3365
    s->aflag = aflag;
3366
    s->dflag = dflag;
3367

    
3368
    /* lock generation */
3369
    if (prefixes & PREFIX_LOCK)
3370
        gen_op_lock();
3371

    
3372
    /* now check op code */
3373
 reswitch:
3374
    switch(b) {
3375
    case 0x0f:
3376
        /**************************/
3377
        /* extended op code */
3378
        b = ldub_code(s->pc++) | 0x100;
3379
        goto reswitch;
3380

    
3381
        /**************************/
3382
        /* arith & logic */
3383
    case 0x00 ... 0x05:
3384
    case 0x08 ... 0x0d:
3385
    case 0x10 ... 0x15:
3386
    case 0x18 ... 0x1d:
3387
    case 0x20 ... 0x25:
3388
    case 0x28 ... 0x2d:
3389
    case 0x30 ... 0x35:
3390
    case 0x38 ... 0x3d:
3391
        {
3392
            int op, f, val;
3393
            op = (b >> 3) & 7;
3394
            f = (b >> 1) & 3;
3395

    
3396
            if ((b & 1) == 0)
3397
                ot = OT_BYTE;
3398
            else
3399
                ot = dflag + OT_WORD;
3400

    
3401
            switch(f) {
3402
            case 0: /* OP Ev, Gv */
3403
                modrm = ldub_code(s->pc++);
3404
                reg = ((modrm >> 3) & 7) | rex_r;
3405
                mod = (modrm >> 6) & 3;
3406
                rm = (modrm & 7) | REX_B(s);
3407
                if (mod != 3) {
3408
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3409
                    opreg = OR_TMP0;
3410
                } else if (op == OP_XORL && rm == reg) {
3411
                xor_zero:
3412
                    /* xor reg, reg optimisation */
3413
                    gen_op_movl_T0_0();
3414
                    s->cc_op = CC_OP_LOGICB + ot;
3415
                    gen_op_mov_reg_T0[ot][reg]();
3416
                    gen_op_update1_cc();
3417
                    break;
3418
                } else {
3419
                    opreg = rm;
3420
                }
3421
                gen_op_mov_TN_reg[ot][1][reg]();
3422
                gen_op(s, op, ot, opreg);
3423
                break;
3424
            case 1: /* OP Gv, Ev */
3425
                modrm = ldub_code(s->pc++);
3426
                mod = (modrm >> 6) & 3;
3427
                reg = ((modrm >> 3) & 7) | rex_r;
3428
                rm = (modrm & 7) | REX_B(s);
3429
                if (mod != 3) {
3430
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3431
                    gen_op_ld_T1_A0[ot + s->mem_index]();
3432
                } else if (op == OP_XORL && rm == reg) {
3433
                    goto xor_zero;
3434
                } else {
3435
                    gen_op_mov_TN_reg[ot][1][rm]();
3436
                }
3437
                gen_op(s, op, ot, reg);
3438
                break;
3439
            case 2: /* OP A, Iv */
3440
                val = insn_get(s, ot);
3441
                gen_op_movl_T1_im(val);
3442
                gen_op(s, op, ot, OR_EAX);
3443
                break;
3444
            }
3445
        }
3446
        break;
3447

    
3448
    case 0x80: /* GRP1 */
3449
    case 0x81:
3450
    case 0x82:
3451
    case 0x83:
3452
        {
3453
            int val;
3454

    
3455
            if ((b & 1) == 0)
3456
                ot = OT_BYTE;
3457
            else
3458
                ot = dflag + OT_WORD;
3459

    
3460
            modrm = ldub_code(s->pc++);
3461
            mod = (modrm >> 6) & 3;
3462
            rm = (modrm & 7) | REX_B(s);
3463
            op = (modrm >> 3) & 7;
3464

    
3465
            if (mod != 3) {
3466
                if (b == 0x83)
3467
                    s->rip_offset = 1;
3468
                else
3469
                    s->rip_offset = insn_const_size(ot);
3470
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3471
                opreg = OR_TMP0;
3472
            } else {
3473
                opreg = rm;
3474
            }
3475

    
3476
            switch(b) {
3477
            default:
3478
            case 0x80:
3479
            case 0x81:
3480
            case 0x82:
3481
                val = insn_get(s, ot);
3482
                break;
3483
            case 0x83:
3484
                val = (int8_t)insn_get(s, OT_BYTE);
3485
                break;
3486
            }
3487
            gen_op_movl_T1_im(val);
3488
            gen_op(s, op, ot, opreg);
3489
        }
3490
        break;
3491

    
3492
        /**************************/
3493
        /* inc, dec, and other misc arith */
3494
    case 0x40 ... 0x47: /* inc Gv */
3495
        ot = dflag ? OT_LONG : OT_WORD;
3496
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
3497
        break;
3498
    case 0x48 ... 0x4f: /* dec Gv */
3499
        ot = dflag ? OT_LONG : OT_WORD;
3500
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
3501
        break;
3502
    case 0xf6: /* GRP3 */
3503
    case 0xf7:
3504
        if ((b & 1) == 0)
3505
            ot = OT_BYTE;
3506
        else
3507
            ot = dflag + OT_WORD;
3508

    
3509
        modrm = ldub_code(s->pc++);
3510
        mod = (modrm >> 6) & 3;
3511
        rm = (modrm & 7) | REX_B(s);
3512
        op = (modrm >> 3) & 7;
3513
        if (mod != 3) {
3514
            if (op == 0)
3515
                s->rip_offset = insn_const_size(ot);
3516
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3517
            gen_op_ld_T0_A0[ot + s->mem_index]();
3518
        } else {
3519
            gen_op_mov_TN_reg[ot][0][rm]();
3520
        }
3521

    
3522
        switch(op) {
3523
        case 0: /* test */
3524
            val = insn_get(s, ot);
3525
            gen_op_movl_T1_im(val);
3526
            gen_op_testl_T0_T1_cc();
3527
            s->cc_op = CC_OP_LOGICB + ot;
3528
            break;
3529
        case 2: /* not */
3530
            gen_op_notl_T0();
3531
            if (mod != 3) {
3532
                gen_op_st_T0_A0[ot + s->mem_index]();
3533
            } else {
3534
                gen_op_mov_reg_T0[ot][rm]();
3535
            }
3536
            break;
3537
        case 3: /* neg */
3538
            gen_op_negl_T0();
3539
            if (mod != 3) {
3540
                gen_op_st_T0_A0[ot + s->mem_index]();
3541
            } else {
3542
                gen_op_mov_reg_T0[ot][rm]();
3543
            }
3544
            gen_op_update_neg_cc();
3545
            s->cc_op = CC_OP_SUBB + ot;
3546
            break;
3547
        case 4: /* mul */
3548
            switch(ot) {
3549
            case OT_BYTE:
3550
                gen_op_mulb_AL_T0();
3551
                s->cc_op = CC_OP_MULB;
3552
                break;
3553
            case OT_WORD:
3554
                gen_op_mulw_AX_T0();
3555
                s->cc_op = CC_OP_MULW;
3556
                break;
3557
            default:
3558
            case OT_LONG:
3559
                gen_op_mull_EAX_T0();
3560
                s->cc_op = CC_OP_MULL;
3561
                break;
3562
#ifdef TARGET_X86_64
3563
            case OT_QUAD:
3564
                gen_op_mulq_EAX_T0();
3565
                s->cc_op = CC_OP_MULQ;
3566
                break;
3567
#endif
3568
            }
3569
            break;
3570
        case 5: /* imul */
3571
            switch(ot) {
3572
            case OT_BYTE:
3573
                gen_op_imulb_AL_T0();
3574
                s->cc_op = CC_OP_MULB;
3575
                break;
3576
            case OT_WORD:
3577
                gen_op_imulw_AX_T0();
3578
                s->cc_op = CC_OP_MULW;
3579
                break;
3580
            default:
3581
            case OT_LONG:
3582
                gen_op_imull_EAX_T0();
3583
                s->cc_op = CC_OP_MULL;
3584
                break;
3585
#ifdef TARGET_X86_64
3586
            case OT_QUAD:
3587
                gen_op_imulq_EAX_T0();
3588
                s->cc_op = CC_OP_MULQ;
3589
                break;
3590
#endif
3591
            }
3592
            break;
3593
        case 6: /* div */
3594
            switch(ot) {
3595
            case OT_BYTE:
3596
                gen_jmp_im(pc_start - s->cs_base);
3597
                gen_op_divb_AL_T0();
3598
                break;
3599
            case OT_WORD:
3600
                gen_jmp_im(pc_start - s->cs_base);
3601
                gen_op_divw_AX_T0();
3602
                break;
3603
            default:
3604
            case OT_LONG:
3605
                gen_jmp_im(pc_start - s->cs_base);
3606
                gen_op_divl_EAX_T0();
3607
                break;
3608
#ifdef TARGET_X86_64
3609
            case OT_QUAD:
3610
                gen_jmp_im(pc_start - s->cs_base);
3611
                gen_op_divq_EAX_T0();
3612
                break;
3613
#endif
3614
            }
3615
            break;
3616
        case 7: /* idiv */
3617
            switch(ot) {
3618
            case OT_BYTE:
3619
                gen_jmp_im(pc_start - s->cs_base);
3620
                gen_op_idivb_AL_T0();
3621
                break;
3622
            case OT_WORD:
3623
                gen_jmp_im(pc_start - s->cs_base);
3624
                gen_op_idivw_AX_T0();
3625
                break;
3626
            default:
3627
            case OT_LONG:
3628
                gen_jmp_im(pc_start - s->cs_base);
3629
                gen_op_idivl_EAX_T0();
3630
                break;
3631
#ifdef TARGET_X86_64
3632
            case OT_QUAD:
3633
                gen_jmp_im(pc_start - s->cs_base);
3634
                gen_op_idivq_EAX_T0();
3635
                break;
3636
#endif
3637
            }
3638
            break;
3639
        default:
3640
            goto illegal_op;
3641
        }
3642
        break;
3643

    
3644
    case 0xfe: /* GRP4 */
3645
    case 0xff: /* GRP5 */
3646
        if ((b & 1) == 0)
3647
            ot = OT_BYTE;
3648
        else
3649
            ot = dflag + OT_WORD;
3650

    
3651
        modrm = ldub_code(s->pc++);
3652
        mod = (modrm >> 6) & 3;
3653
        rm = (modrm & 7) | REX_B(s);
3654
        op = (modrm >> 3) & 7;
3655
        if (op >= 2 && b == 0xfe) {
3656
            goto illegal_op;
3657
        }
3658
        if (CODE64(s)) {
3659
            if (op == 2 || op == 4) {
3660
                /* operand size for jumps is 64 bit */
3661
                ot = OT_QUAD;
3662
            } else if (op == 3 || op == 5) {
3663
                /* for call calls, the operand is 16 or 32 bit, even
3664
                   in long mode */
3665
                ot = dflag ? OT_LONG : OT_WORD;
3666
            } else if (op == 6) {
3667
                /* default push size is 64 bit */
3668
                ot = dflag ? OT_QUAD : OT_WORD;
3669
            }
3670
        }
3671
        if (mod != 3) {
3672
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3673
            if (op >= 2 && op != 3 && op != 5)
3674
                gen_op_ld_T0_A0[ot + s->mem_index]();
3675
        } else {
3676
            gen_op_mov_TN_reg[ot][0][rm]();
3677
        }
3678

    
3679
        switch(op) {
3680
        case 0: /* inc Ev */
3681
            if (mod != 3)
3682
                opreg = OR_TMP0;
3683
            else
3684
                opreg = rm;
3685
            gen_inc(s, ot, opreg, 1);
3686
            break;
3687
        case 1: /* dec Ev */
3688
            if (mod != 3)
3689
                opreg = OR_TMP0;
3690
            else
3691
                opreg = rm;
3692
            gen_inc(s, ot, opreg, -1);
3693
            break;
3694
        case 2: /* call Ev */
3695
            /* XXX: optimize if memory (no 'and' is necessary) */
3696
            if (s->dflag == 0)
3697
                gen_op_andl_T0_ffff();
3698
            next_eip = s->pc - s->cs_base;
3699
            gen_movtl_T1_im(next_eip);
3700
            gen_push_T1(s);
3701
            gen_op_jmp_T0();
3702
            gen_eob(s);
3703
            break;
3704
        case 3: /* lcall Ev */
3705
            gen_op_ld_T1_A0[ot + s->mem_index]();
3706
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3707
            gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3708
        do_lcall:
3709
            if (s->pe && !s->vm86) {
3710
                if (s->cc_op != CC_OP_DYNAMIC)
3711
                    gen_op_set_cc_op(s->cc_op);
3712
                gen_jmp_im(pc_start - s->cs_base);
3713
                gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3714
            } else {
3715
                gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3716
            }
3717
            gen_eob(s);
3718
            break;
3719
        case 4: /* jmp Ev */
3720
            if (s->dflag == 0)
3721
                gen_op_andl_T0_ffff();
3722
            gen_op_jmp_T0();
3723
            gen_eob(s);
3724
            break;
3725
        case 5: /* ljmp Ev */
3726
            gen_op_ld_T1_A0[ot + s->mem_index]();
3727
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3728
            gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3729
        do_ljmp:
3730
            if (s->pe && !s->vm86) {
3731
                if (s->cc_op != CC_OP_DYNAMIC)
3732
                    gen_op_set_cc_op(s->cc_op);
3733
                gen_jmp_im(pc_start - s->cs_base);
3734
                gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3735
            } else {
3736
                gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3737
                gen_op_movl_T0_T1();
3738
                gen_op_jmp_T0();
3739
            }
3740
            gen_eob(s);
3741
            break;
3742
        case 6: /* push Ev */
3743
            gen_push_T0(s);
3744
            break;
3745
        default:
3746
            goto illegal_op;
3747
        }
3748
        break;
3749

    
3750
    case 0x84: /* test Ev, Gv */
3751
    case 0x85:
3752
        if ((b & 1) == 0)
3753
            ot = OT_BYTE;
3754
        else
3755
            ot = dflag + OT_WORD;
3756

    
3757
        modrm = ldub_code(s->pc++);
3758
        mod = (modrm >> 6) & 3;
3759
        rm = (modrm & 7) | REX_B(s);
3760
        reg = ((modrm >> 3) & 7) | rex_r;
3761

    
3762
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3763
        gen_op_mov_TN_reg[ot][1][reg]();
3764
        gen_op_testl_T0_T1_cc();
3765
        s->cc_op = CC_OP_LOGICB + ot;
3766
        break;
3767

    
3768
    case 0xa8: /* test eAX, Iv */
3769
    case 0xa9:
3770
        if ((b & 1) == 0)
3771
            ot = OT_BYTE;
3772
        else
3773
            ot = dflag + OT_WORD;
3774
        val = insn_get(s, ot);
3775

    
3776
        gen_op_mov_TN_reg[ot][0][OR_EAX]();
3777
        gen_op_movl_T1_im(val);
3778
        gen_op_testl_T0_T1_cc();
3779
        s->cc_op = CC_OP_LOGICB + ot;
3780
        break;
3781

    
3782
    case 0x98: /* CWDE/CBW */
3783
#ifdef TARGET_X86_64
3784
        if (dflag == 2) {
3785
            gen_op_movslq_RAX_EAX();
3786
        } else
3787
#endif
3788
        if (dflag == 1)
3789
            gen_op_movswl_EAX_AX();
3790
        else
3791
            gen_op_movsbw_AX_AL();
3792
        break;
3793
    case 0x99: /* CDQ/CWD */
3794
#ifdef TARGET_X86_64
3795
        if (dflag == 2) {
3796
            gen_op_movsqo_RDX_RAX();
3797
        } else
3798
#endif
3799
        if (dflag == 1)
3800
            gen_op_movslq_EDX_EAX();
3801
        else
3802
            gen_op_movswl_DX_AX();
3803
        break;
3804
    case 0x1af: /* imul Gv, Ev */
3805
    case 0x69: /* imul Gv, Ev, I */
3806
    case 0x6b:
3807
        ot = dflag + OT_WORD;
3808
        modrm = ldub_code(s->pc++);
3809
        reg = ((modrm >> 3) & 7) | rex_r;
3810
        if (b == 0x69)
3811
            s->rip_offset = insn_const_size(ot);
3812
        else if (b == 0x6b)
3813
            s->rip_offset = 1;
3814
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3815
        if (b == 0x69) {
3816
            val = insn_get(s, ot);
3817
            gen_op_movl_T1_im(val);
3818
        } else if (b == 0x6b) {
3819
            val = (int8_t)insn_get(s, OT_BYTE);
3820
            gen_op_movl_T1_im(val);
3821
        } else {
3822
            gen_op_mov_TN_reg[ot][1][reg]();
3823
        }
3824

    
3825
#ifdef TARGET_X86_64
3826
        if (ot == OT_QUAD) {
3827
            gen_op_imulq_T0_T1();
3828
        } else
3829
#endif
3830
        if (ot == OT_LONG) {
3831
            gen_op_imull_T0_T1();
3832
        } else {
3833
            gen_op_imulw_T0_T1();
3834
        }
3835
        gen_op_mov_reg_T0[ot][reg]();
3836
        s->cc_op = CC_OP_MULB + ot;
3837
        break;
3838
    case 0x1c0:
3839
    case 0x1c1: /* xadd Ev, Gv */
3840
        if ((b & 1) == 0)
3841
            ot = OT_BYTE;
3842
        else
3843
            ot = dflag + OT_WORD;
3844
        modrm = ldub_code(s->pc++);
3845
        reg = ((modrm >> 3) & 7) | rex_r;
3846
        mod = (modrm >> 6) & 3;
3847
        if (mod == 3) {
3848
            rm = (modrm & 7) | REX_B(s);
3849
            gen_op_mov_TN_reg[ot][0][reg]();
3850
            gen_op_mov_TN_reg[ot][1][rm]();
3851
            gen_op_addl_T0_T1();
3852
            gen_op_mov_reg_T1[ot][reg]();
3853
            gen_op_mov_reg_T0[ot][rm]();
3854
        } else {
3855
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3856
            gen_op_mov_TN_reg[ot][0][reg]();
3857
            gen_op_ld_T1_A0[ot + s->mem_index]();
3858
            gen_op_addl_T0_T1();
3859
            gen_op_st_T0_A0[ot + s->mem_index]();
3860
            gen_op_mov_reg_T1[ot][reg]();
3861
        }
3862
        gen_op_update2_cc();
3863
        s->cc_op = CC_OP_ADDB + ot;
3864
        break;
3865
    case 0x1b0:
3866
    case 0x1b1: /* cmpxchg Ev, Gv */
3867
        if ((b & 1) == 0)
3868
            ot = OT_BYTE;
3869
        else
3870
            ot = dflag + OT_WORD;
3871
        modrm = ldub_code(s->pc++);
3872
        reg = ((modrm >> 3) & 7) | rex_r;
3873
        mod = (modrm >> 6) & 3;
3874
        gen_op_mov_TN_reg[ot][1][reg]();
3875
        if (mod == 3) {
3876
            rm = (modrm & 7) | REX_B(s);
3877
            gen_op_mov_TN_reg[ot][0][rm]();
3878
            gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3879
            gen_op_mov_reg_T0[ot][rm]();
3880
        } else {
3881
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3882
            gen_op_ld_T0_A0[ot + s->mem_index]();
3883
            gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3884
        }
3885
        s->cc_op = CC_OP_SUBB + ot;
3886
        break;
3887
    case 0x1c7: /* cmpxchg8b */
3888
        modrm = ldub_code(s->pc++);
3889
        mod = (modrm >> 6) & 3;
3890
        if (mod == 3)
3891
            goto illegal_op;
3892
        gen_jmp_im(pc_start - s->cs_base);
3893
        if (s->cc_op != CC_OP_DYNAMIC)
3894
            gen_op_set_cc_op(s->cc_op);
3895
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3896
        gen_op_cmpxchg8b();
3897
        s->cc_op = CC_OP_EFLAGS;
3898
        break;
3899

    
3900
        /**************************/
3901
        /* push/pop */
3902
    case 0x50 ... 0x57: /* push */
3903
        gen_op_mov_TN_reg[OT_LONG][0][(b & 7) | REX_B(s)]();
3904
        gen_push_T0(s);
3905
        break;
3906
    case 0x58 ... 0x5f: /* pop */
3907
        if (CODE64(s)) {
3908
            ot = dflag ? OT_QUAD : OT_WORD;
3909
        } else {
3910
            ot = dflag + OT_WORD;
3911
        }
3912
        gen_pop_T0(s);
3913
        /* NOTE: order is important for pop %sp */
3914
        gen_pop_update(s);
3915
        gen_op_mov_reg_T0[ot][(b & 7) | REX_B(s)]();
3916
        break;
3917
    case 0x60: /* pusha */
3918
        if (CODE64(s))
3919
            goto illegal_op;
3920
        gen_pusha(s);
3921
        break;
3922
    case 0x61: /* popa */
3923
        if (CODE64(s))
3924
            goto illegal_op;
3925
        gen_popa(s);
3926
        break;
3927
    case 0x68: /* push Iv */
3928
    case 0x6a:
3929
        if (CODE64(s)) {
3930
            ot = dflag ? OT_QUAD : OT_WORD;
3931
        } else {
3932
            ot = dflag + OT_WORD;
3933
        }
3934
        if (b == 0x68)
3935
            val = insn_get(s, ot);
3936
        else
3937
            val = (int8_t)insn_get(s, OT_BYTE);
3938
        gen_op_movl_T0_im(val);
3939
        gen_push_T0(s);
3940
        break;
3941
    case 0x8f: /* pop Ev */
3942
        if (CODE64(s)) {
3943
            ot = dflag ? OT_QUAD : OT_WORD;
3944
        } else {
3945
            ot = dflag + OT_WORD;
3946
        }
3947
        modrm = ldub_code(s->pc++);
3948
        mod = (modrm >> 6) & 3;
3949
        gen_pop_T0(s);
3950
        if (mod == 3) {
3951
            /* NOTE: order is important for pop %sp */
3952
            gen_pop_update(s);
3953
            rm = (modrm & 7) | REX_B(s);
3954
            gen_op_mov_reg_T0[ot][rm]();
3955
        } else {
3956
            /* NOTE: order is important too for MMU exceptions */
3957
            s->popl_esp_hack = 1 << ot;
3958
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3959
            s->popl_esp_hack = 0;
3960
            gen_pop_update(s);
3961
        }
3962
        break;
3963
    case 0xc8: /* enter */
3964
        {
3965
            int level;
3966
            val = lduw_code(s->pc);
3967
            s->pc += 2;
3968
            level = ldub_code(s->pc++);
3969
            gen_enter(s, val, level);
3970
        }
3971
        break;
3972
    case 0xc9: /* leave */
3973
        /* XXX: exception not precise (ESP is updated before potential exception) */
3974
        if (CODE64(s)) {
3975
            gen_op_mov_TN_reg[OT_QUAD][0][R_EBP]();
3976
            gen_op_mov_reg_T0[OT_QUAD][R_ESP]();
3977
        } else if (s->ss32) {
3978
            gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
3979
            gen_op_mov_reg_T0[OT_LONG][R_ESP]();
3980
        } else {
3981
            gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
3982
            gen_op_mov_reg_T0[OT_WORD][R_ESP]();
3983
        }
3984
        gen_pop_T0(s);
3985
        if (CODE64(s)) {
3986
            ot = dflag ? OT_QUAD : OT_WORD;
3987
        } else {
3988
            ot = dflag + OT_WORD;
3989
        }
3990
        gen_op_mov_reg_T0[ot][R_EBP]();
3991
        gen_pop_update(s);
3992
        break;
3993
    case 0x06: /* push es */
3994
    case 0x0e: /* push cs */
3995
    case 0x16: /* push ss */
3996
    case 0x1e: /* push ds */
3997
        if (CODE64(s))
3998
            goto illegal_op;
3999
        gen_op_movl_T0_seg(b >> 3);
4000
        gen_push_T0(s);
4001
        break;
4002
    case 0x1a0: /* push fs */
4003
    case 0x1a8: /* push gs */
4004
        gen_op_movl_T0_seg((b >> 3) & 7);
4005
        gen_push_T0(s);
4006
        break;
4007
    case 0x07: /* pop es */
4008
    case 0x17: /* pop ss */
4009
    case 0x1f: /* pop ds */
4010
        if (CODE64(s))
4011
            goto illegal_op;
4012
        reg = b >> 3;
4013
        gen_pop_T0(s);
4014
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4015
        gen_pop_update(s);
4016
        if (reg == R_SS) {
4017
            /* if reg == SS, inhibit interrupts/trace. */
4018
            /* If several instructions disable interrupts, only the
4019
               _first_ does it */
4020
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4021
                gen_op_set_inhibit_irq();
4022
            s->tf = 0;
4023
        }
4024
        if (s->is_jmp) {
4025
            gen_jmp_im(s->pc - s->cs_base);
4026
            gen_eob(s);
4027
        }
4028
        break;
4029
    case 0x1a1: /* pop fs */
4030
    case 0x1a9: /* pop gs */
4031
        gen_pop_T0(s);
4032
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4033
        gen_pop_update(s);
4034
        if (s->is_jmp) {
4035
            gen_jmp_im(s->pc - s->cs_base);
4036
            gen_eob(s);
4037
        }
4038
        break;
4039

    
4040
        /**************************/
4041
        /* mov */
4042
    case 0x88:
4043
    case 0x89: /* mov Gv, Ev */
4044
        if ((b & 1) == 0)
4045
            ot = OT_BYTE;
4046
        else
4047
            ot = dflag + OT_WORD;
4048
        modrm = ldub_code(s->pc++);
4049
        reg = ((modrm >> 3) & 7) | rex_r;
4050

    
4051
        /* generate a generic store */
4052
        gen_ldst_modrm(s, modrm, ot, reg, 1);
4053
        break;
4054
    case 0xc6:
4055
    case 0xc7: /* mov Ev, Iv */
4056
        if ((b & 1) == 0)
4057
            ot = OT_BYTE;
4058
        else
4059
            ot = dflag + OT_WORD;
4060
        modrm = ldub_code(s->pc++);
4061
        mod = (modrm >> 6) & 3;
4062
        if (mod != 3) {
4063
            s->rip_offset = insn_const_size(ot);
4064
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4065
        }
4066
        val = insn_get(s, ot);
4067
        gen_op_movl_T0_im(val);
4068
        if (mod != 3)
4069
            gen_op_st_T0_A0[ot + s->mem_index]();
4070
        else
4071
            gen_op_mov_reg_T0[ot][(modrm & 7) | REX_B(s)]();
4072
        break;
4073
    case 0x8a:
4074
    case 0x8b: /* mov Ev, Gv */
4075
        if ((b & 1) == 0)
4076
            ot = OT_BYTE;
4077
        else
4078
            ot = OT_WORD + dflag;
4079
        modrm = ldub_code(s->pc++);
4080
        reg = ((modrm >> 3) & 7) | rex_r;
4081

    
4082
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4083
        gen_op_mov_reg_T0[ot][reg]();
4084
        break;
4085
    case 0x8e: /* mov seg, Gv */
4086
        modrm = ldub_code(s->pc++);
4087
        reg = (modrm >> 3) & 7;
4088
        if (reg >= 6 || reg == R_CS)
4089
            goto illegal_op;
4090
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4091
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4092
        if (reg == R_SS) {
4093
            /* if reg == SS, inhibit interrupts/trace */
4094
            /* If several instructions disable interrupts, only the
4095
               _first_ does it */
4096
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4097
                gen_op_set_inhibit_irq();
4098
            s->tf = 0;
4099
        }
4100
        if (s->is_jmp) {
4101
            gen_jmp_im(s->pc - s->cs_base);
4102
            gen_eob(s);
4103
        }
4104
        break;
4105
    case 0x8c: /* mov Gv, seg */
4106
        modrm = ldub_code(s->pc++);
4107
        reg = (modrm >> 3) & 7;
4108
        mod = (modrm >> 6) & 3;
4109
        if (reg >= 6)
4110
            goto illegal_op;
4111
        gen_op_movl_T0_seg(reg);
4112
        if (mod == 3)
4113
            ot = OT_WORD + dflag;
4114
        else
4115
            ot = OT_WORD;
4116
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4117
        break;
4118

    
4119
    case 0x1b6: /* movzbS Gv, Eb */
4120
    case 0x1b7: /* movzwS Gv, Eb */
4121
    case 0x1be: /* movsbS Gv, Eb */
4122
    case 0x1bf: /* movswS Gv, Eb */
4123
        {
4124
            int d_ot;
4125
            /* d_ot is the size of destination */
4126
            d_ot = dflag + OT_WORD;
4127
            /* ot is the size of source */
4128
            ot = (b & 1) + OT_BYTE;
4129
            modrm = ldub_code(s->pc++);
4130
            reg = ((modrm >> 3) & 7) | rex_r;
4131
            mod = (modrm >> 6) & 3;
4132
            rm = (modrm & 7) | REX_B(s);
4133

    
4134
            if (mod == 3) {
4135
                gen_op_mov_TN_reg[ot][0][rm]();
4136
                switch(ot | (b & 8)) {
4137
                case OT_BYTE:
4138
                    gen_op_movzbl_T0_T0();
4139
                    break;
4140
                case OT_BYTE | 8:
4141
                    gen_op_movsbl_T0_T0();
4142
                    break;
4143
                case OT_WORD:
4144
                    gen_op_movzwl_T0_T0();
4145
                    break;
4146
                default:
4147
                case OT_WORD | 8:
4148
                    gen_op_movswl_T0_T0();
4149
                    break;
4150
                }
4151
                gen_op_mov_reg_T0[d_ot][reg]();
4152
            } else {
4153
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4154
                if (b & 8) {
4155
                    gen_op_lds_T0_A0[ot + s->mem_index]();
4156
                } else {
4157
                    gen_op_ldu_T0_A0[ot + s->mem_index]();
4158
                }
4159
                gen_op_mov_reg_T0[d_ot][reg]();
4160
            }
4161
        }
4162
        break;
4163

    
4164
    case 0x8d: /* lea */
4165
        ot = dflag + OT_WORD;
4166
        modrm = ldub_code(s->pc++);
4167
        mod = (modrm >> 6) & 3;
4168
        if (mod == 3)
4169
            goto illegal_op;
4170
        reg = ((modrm >> 3) & 7) | rex_r;
4171
        /* we must ensure that no segment is added */
4172
        s->override = -1;
4173
        val = s->addseg;
4174
        s->addseg = 0;
4175
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4176
        s->addseg = val;
4177
        gen_op_mov_reg_A0[ot - OT_WORD][reg]();
4178
        break;
4179

    
4180
    case 0xa0: /* mov EAX, Ov */
4181
    case 0xa1:
4182
    case 0xa2: /* mov Ov, EAX */
4183
    case 0xa3:
4184
        {
4185
            target_ulong offset_addr;
4186

    
4187
            if ((b & 1) == 0)
4188
                ot = OT_BYTE;
4189
            else
4190
                ot = dflag + OT_WORD;
4191
#ifdef TARGET_X86_64
4192
            if (s->aflag == 2) {
4193
                offset_addr = ldq_code(s->pc);
4194
                s->pc += 8;
4195
                if (offset_addr == (int32_t)offset_addr)
4196
                    gen_op_movq_A0_im(offset_addr);
4197
                else
4198
                    gen_op_movq_A0_im64(offset_addr >> 32, offset_addr);
4199
            } else
4200
#endif
4201
            {
4202
                if (s->aflag) {
4203
                    offset_addr = insn_get(s, OT_LONG);
4204
                } else {
4205
                    offset_addr = insn_get(s, OT_WORD);
4206
                }
4207
                gen_op_movl_A0_im(offset_addr);
4208
            }
4209
            gen_add_A0_ds_seg(s);
4210
            if ((b & 2) == 0) {
4211
                gen_op_ld_T0_A0[ot + s->mem_index]();
4212
                gen_op_mov_reg_T0[ot][R_EAX]();
4213
            } else {
4214
                gen_op_mov_TN_reg[ot][0][R_EAX]();
4215
                gen_op_st_T0_A0[ot + s->mem_index]();
4216
            }
4217
        }
4218
        break;
4219
    case 0xd7: /* xlat */
4220
#ifdef TARGET_X86_64
4221
        if (s->aflag == 2) {
4222
            gen_op_movq_A0_reg[R_EBX]();
4223
            gen_op_addq_A0_AL();
4224
        } else
4225
#endif
4226
        {
4227
            gen_op_movl_A0_reg[R_EBX]();
4228
            gen_op_addl_A0_AL();
4229
            if (s->aflag == 0)
4230
                gen_op_andl_A0_ffff();
4231
        }
4232
        gen_add_A0_ds_seg(s);
4233
        gen_op_ldu_T0_A0[OT_BYTE + s->mem_index]();
4234
        gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
4235
        break;
4236
    case 0xb0 ... 0xb7: /* mov R, Ib */
4237
        val = insn_get(s, OT_BYTE);
4238
        gen_op_movl_T0_im(val);
4239
        gen_op_mov_reg_T0[OT_BYTE][(b & 7) | REX_B(s)]();
4240
        break;
4241
    case 0xb8 ... 0xbf: /* mov R, Iv */
4242
#ifdef TARGET_X86_64
4243
        if (dflag == 2) {
4244
            uint64_t tmp;
4245
            /* 64 bit case */
4246
            tmp = ldq_code(s->pc);
4247
            s->pc += 8;
4248
            reg = (b & 7) | REX_B(s);
4249
            gen_movtl_T0_im(tmp);
4250
            gen_op_mov_reg_T0[OT_QUAD][reg]();
4251
        } else
4252
#endif
4253
        {
4254
            ot = dflag ? OT_LONG : OT_WORD;
4255
            val = insn_get(s, ot);
4256
            reg = (b & 7) | REX_B(s);
4257
            gen_op_movl_T0_im(val);
4258
            gen_op_mov_reg_T0[ot][reg]();
4259
        }
4260
        break;
4261

    
4262
    case 0x91 ... 0x97: /* xchg R, EAX */
4263
        ot = dflag + OT_WORD;
4264
        reg = (b & 7) | REX_B(s);
4265
        rm = R_EAX;
4266
        goto do_xchg_reg;
4267
    case 0x86:
4268
    case 0x87: /* xchg Ev, Gv */
4269
        if ((b & 1) == 0)
4270
            ot = OT_BYTE;
4271
        else
4272
            ot = dflag + OT_WORD;
4273
        modrm = ldub_code(s->pc++);
4274
        reg = ((modrm >> 3) & 7) | rex_r;
4275
        mod = (modrm >> 6) & 3;
4276
        if (mod == 3) {
4277
            rm = (modrm & 7) | REX_B(s);
4278
        do_xchg_reg:
4279
            gen_op_mov_TN_reg[ot][0][reg]();
4280
            gen_op_mov_TN_reg[ot][1][rm]();
4281
            gen_op_mov_reg_T0[ot][rm]();
4282
            gen_op_mov_reg_T1[ot][reg]();
4283
        } else {
4284
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4285
            gen_op_mov_TN_reg[ot][0][reg]();
4286
            /* for xchg, lock is implicit */
4287
            if (!(prefixes & PREFIX_LOCK))
4288
                gen_op_lock();
4289
            gen_op_ld_T1_A0[ot + s->mem_index]();
4290
            gen_op_st_T0_A0[ot + s->mem_index]();
4291
            if (!(prefixes & PREFIX_LOCK))
4292
                gen_op_unlock();
4293
            gen_op_mov_reg_T1[ot][reg]();
4294
        }
4295
        break;
4296
    case 0xc4: /* les Gv */
4297
        if (CODE64(s))
4298
            goto illegal_op;
4299
        op = R_ES;
4300
        goto do_lxx;
4301
    case 0xc5: /* lds Gv */
4302
        if (CODE64(s))
4303
            goto illegal_op;
4304
        op = R_DS;
4305
        goto do_lxx;
4306
    case 0x1b2: /* lss Gv */
4307
        op = R_SS;
4308
        goto do_lxx;
4309
    case 0x1b4: /* lfs Gv */
4310
        op = R_FS;
4311
        goto do_lxx;
4312
    case 0x1b5: /* lgs Gv */
4313
        op = R_GS;
4314
    do_lxx:
4315
        ot = dflag ? OT_LONG : OT_WORD;
4316
        modrm = ldub_code(s->pc++);
4317
        reg = ((modrm >> 3) & 7) | rex_r;
4318
        mod = (modrm >> 6) & 3;
4319
        if (mod == 3)
4320
            goto illegal_op;
4321
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4322
        gen_op_ld_T1_A0[ot + s->mem_index]();
4323
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4324
        /* load the segment first to handle exceptions properly */
4325
        gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
4326
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4327
        /* then put the data */
4328
        gen_op_mov_reg_T1[ot][reg]();
4329
        if (s->is_jmp) {
4330
            gen_jmp_im(s->pc - s->cs_base);
4331
            gen_eob(s);
4332
        }
4333
        break;
4334

    
4335
        /************************/
4336
        /* shifts */
4337
    case 0xc0:
4338
    case 0xc1:
4339
        /* shift Ev,Ib */
4340
        shift = 2;
4341
    grp2:
4342
        {
4343
            if ((b & 1) == 0)
4344
                ot = OT_BYTE;
4345
            else
4346
                ot = dflag + OT_WORD;
4347

    
4348
            modrm = ldub_code(s->pc++);
4349
            mod = (modrm >> 6) & 3;
4350
            op = (modrm >> 3) & 7;
4351

    
4352
            if (mod != 3) {
4353
                if (shift == 2) {
4354
                    s->rip_offset = 1;
4355
                }
4356
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4357
                opreg = OR_TMP0;
4358
            } else {
4359
                opreg = (modrm & 7) | REX_B(s);
4360
            }
4361

    
4362
            /* simpler op */
4363
            if (shift == 0) {
4364
                gen_shift(s, op, ot, opreg, OR_ECX);
4365
            } else {
4366
                if (shift == 2) {
4367
                    shift = ldub_code(s->pc++);
4368
                }
4369
                gen_shifti(s, op, ot, opreg, shift);
4370
            }
4371
        }
4372
        break;
4373
    case 0xd0:
4374
    case 0xd1:
4375
        /* shift Ev,1 */
4376
        shift = 1;
4377
        goto grp2;
4378
    case 0xd2:
4379
    case 0xd3:
4380
        /* shift Ev,cl */
4381
        shift = 0;
4382
        goto grp2;
4383

    
4384
    case 0x1a4: /* shld imm */
4385
        op = 0;
4386
        shift = 1;
4387
        goto do_shiftd;
4388
    case 0x1a5: /* shld cl */
4389
        op = 0;
4390
        shift = 0;
4391
        goto do_shiftd;
4392
    case 0x1ac: /* shrd imm */
4393
        op = 1;
4394
        shift = 1;
4395
        goto do_shiftd;
4396
    case 0x1ad: /* shrd cl */
4397
        op = 1;
4398
        shift = 0;
4399
    do_shiftd:
4400
        ot = dflag + OT_WORD;
4401
        modrm = ldub_code(s->pc++);
4402
        mod = (modrm >> 6) & 3;
4403
        rm = (modrm & 7) | REX_B(s);
4404
        reg = ((modrm >> 3) & 7) | rex_r;
4405

    
4406
        if (mod != 3) {
4407
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4408
            gen_op_ld_T0_A0[ot + s->mem_index]();
4409
        } else {
4410
            gen_op_mov_TN_reg[ot][0][rm]();
4411
        }
4412
        gen_op_mov_TN_reg[ot][1][reg]();
4413

    
4414
        if (shift) {
4415
            val = ldub_code(s->pc++);
4416
            if (ot == OT_QUAD)
4417
                val &= 0x3f;
4418
            else
4419
                val &= 0x1f;
4420
            if (val) {
4421
                if (mod == 3)
4422
                    gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4423
                else
4424
                    gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4425
                if (op == 0 && ot != OT_WORD)
4426
                    s->cc_op = CC_OP_SHLB + ot;
4427
                else
4428
                    s->cc_op = CC_OP_SARB + ot;
4429
            }
4430
        } else {
4431
            if (s->cc_op != CC_OP_DYNAMIC)
4432
                gen_op_set_cc_op(s->cc_op);
4433
            if (mod == 3)
4434
                gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4435
            else
4436
                gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4437
            s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4438
        }
4439
        if (mod == 3) {
4440
            gen_op_mov_reg_T0[ot][rm]();
4441
        }
4442
        break;
4443

    
4444
        /************************/
4445
        /* floats */
4446
    case 0xd8 ... 0xdf:
4447
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4448
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4449
            /* XXX: what to do if illegal op ? */
4450
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4451
            break;
4452
        }
4453
        modrm = ldub_code(s->pc++);
4454
        mod = (modrm >> 6) & 3;
4455
        rm = modrm & 7;
4456
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4457
        if (mod != 3) {
4458
            /* memory op */
4459
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4460
            switch(op) {
4461
            case 0x00 ... 0x07: /* fxxxs */
4462
            case 0x10 ... 0x17: /* fixxxl */
4463
            case 0x20 ... 0x27: /* fxxxl */
4464
            case 0x30 ... 0x37: /* fixxx */
4465
                {
4466
                    int op1;
4467
                    op1 = op & 7;
4468

    
4469
                    switch(op >> 4) {
4470
                    case 0:
4471
                        gen_op_flds_FT0_A0();
4472
                        break;
4473
                    case 1:
4474
                        gen_op_fildl_FT0_A0();
4475
                        break;
4476
                    case 2:
4477
                        gen_op_fldl_FT0_A0();
4478
                        break;
4479
                    case 3:
4480
                    default:
4481
                        gen_op_fild_FT0_A0();
4482
                        break;
4483
                    }
4484

    
4485
                    gen_op_fp_arith_ST0_FT0[op1]();
4486
                    if (op1 == 3) {
4487
                        /* fcomp needs pop */
4488
                        gen_op_fpop();
4489
                    }
4490
                }
4491
                break;
4492
            case 0x08: /* flds */
4493
            case 0x0a: /* fsts */
4494
            case 0x0b: /* fstps */
4495
            case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4496
            case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4497
            case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4498
                switch(op & 7) {
4499
                case 0:
4500
                    switch(op >> 4) {
4501
                    case 0:
4502
                        gen_op_flds_ST0_A0();
4503
                        break;
4504
                    case 1:
4505
                        gen_op_fildl_ST0_A0();
4506
                        break;
4507
                    case 2:
4508
                        gen_op_fldl_ST0_A0();
4509
                        break;
4510
                    case 3:
4511
                    default:
4512
                        gen_op_fild_ST0_A0();
4513
                        break;
4514
                    }
4515
                    break;
4516
                case 1:
4517
                    switch(op >> 4) {
4518
                    case 1:
4519
                        gen_op_fisttl_ST0_A0();
4520
                        break;
4521
                    case 2:
4522
                        gen_op_fisttll_ST0_A0();
4523
                        break;
4524
                    case 3:
4525
                    default:
4526
                        gen_op_fistt_ST0_A0();
4527
                    }
4528
                    gen_op_fpop();
4529
                    break;
4530
                default:
4531
                    switch(op >> 4) {
4532
                    case 0:
4533
                        gen_op_fsts_ST0_A0();
4534
                        break;
4535
                    case 1:
4536
                        gen_op_fistl_ST0_A0();
4537
                        break;
4538
                    case 2:
4539
                        gen_op_fstl_ST0_A0();
4540
                        break;
4541
                    case 3:
4542
                    default:
4543
                        gen_op_fist_ST0_A0();
4544
                        break;
4545
                    }
4546
                    if ((op & 7) == 3)
4547
                        gen_op_fpop();
4548
                    break;
4549
                }
4550
                break;
4551
            case 0x0c: /* fldenv mem */
4552
                gen_op_fldenv_A0(s->dflag);
4553
                break;
4554
            case 0x0d: /* fldcw mem */
4555
                gen_op_fldcw_A0();
4556
                break;
4557
            case 0x0e: /* fnstenv mem */
4558
                gen_op_fnstenv_A0(s->dflag);
4559
                break;
4560
            case 0x0f: /* fnstcw mem */
4561
                gen_op_fnstcw_A0();
4562
                break;
4563
            case 0x1d: /* fldt mem */
4564
                gen_op_fldt_ST0_A0();
4565
                break;
4566
            case 0x1f: /* fstpt mem */
4567
                gen_op_fstt_ST0_A0();
4568
                gen_op_fpop();
4569
                break;
4570
            case 0x2c: /* frstor mem */
4571
                gen_op_frstor_A0(s->dflag);
4572
                break;
4573
            case 0x2e: /* fnsave mem */
4574
                gen_op_fnsave_A0(s->dflag);
4575
                break;
4576
            case 0x2f: /* fnstsw mem */
4577
                gen_op_fnstsw_A0();
4578
                break;
4579
            case 0x3c: /* fbld */
4580
                gen_op_fbld_ST0_A0();
4581
                break;
4582
            case 0x3e: /* fbstp */
4583
                gen_op_fbst_ST0_A0();
4584
                gen_op_fpop();
4585
                break;
4586
            case 0x3d: /* fildll */
4587
                gen_op_fildll_ST0_A0();
4588
                break;
4589
            case 0x3f: /* fistpll */
4590
                gen_op_fistll_ST0_A0();
4591
                gen_op_fpop();
4592
                break;
4593
            default:
4594
                goto illegal_op;
4595
            }
4596
        } else {
4597
            /* register float ops */
4598
            opreg = rm;
4599

    
4600
            switch(op) {
4601
            case 0x08: /* fld sti */
4602
                gen_op_fpush();
4603
                gen_op_fmov_ST0_STN((opreg + 1) & 7);
4604
                break;
4605
            case 0x09: /* fxchg sti */
4606
            case 0x29: /* fxchg4 sti, undocumented op */
4607
            case 0x39: /* fxchg7 sti, undocumented op */
4608
                gen_op_fxchg_ST0_STN(opreg);
4609
                break;
4610
            case 0x0a: /* grp d9/2 */
4611
                switch(rm) {
4612
                case 0: /* fnop */
4613
                    /* check exceptions (FreeBSD FPU probe) */
4614
                    if (s->cc_op != CC_OP_DYNAMIC)
4615
                        gen_op_set_cc_op(s->cc_op);
4616
                    gen_jmp_im(pc_start - s->cs_base);
4617
                    gen_op_fwait();
4618
                    break;
4619
                default:
4620
                    goto illegal_op;
4621
                }
4622
                break;
4623
            case 0x0c: /* grp d9/4 */
4624
                switch(rm) {
4625
                case 0: /* fchs */
4626
                    gen_op_fchs_ST0();
4627
                    break;
4628
                case 1: /* fabs */
4629
                    gen_op_fabs_ST0();
4630
                    break;
4631
                case 4: /* ftst */
4632
                    gen_op_fldz_FT0();
4633
                    gen_op_fcom_ST0_FT0();
4634
                    break;
4635
                case 5: /* fxam */
4636
                    gen_op_fxam_ST0();
4637
                    break;
4638
                default:
4639
                    goto illegal_op;
4640
                }
4641
                break;
4642
            case 0x0d: /* grp d9/5 */
4643
                {
4644
                    switch(rm) {
4645
                    case 0:
4646
                        gen_op_fpush();
4647
                        gen_op_fld1_ST0();
4648
                        break;
4649
                    case 1:
4650
                        gen_op_fpush();
4651
                        gen_op_fldl2t_ST0();
4652
                        break;
4653
                    case 2:
4654
                        gen_op_fpush();
4655
                        gen_op_fldl2e_ST0();
4656
                        break;
4657
                    case 3:
4658
                        gen_op_fpush();
4659
                        gen_op_fldpi_ST0();
4660
                        break;
4661
                    case 4:
4662
                        gen_op_fpush();
4663
                        gen_op_fldlg2_ST0();
4664
                        break;
4665
                    case 5:
4666
                        gen_op_fpush();
4667
                        gen_op_fldln2_ST0();
4668
                        break;
4669
                    case 6:
4670
                        gen_op_fpush();
4671
                        gen_op_fldz_ST0();
4672
                        break;
4673
                    default:
4674
                        goto illegal_op;
4675
                    }
4676
                }
4677
                break;
4678
            case 0x0e: /* grp d9/6 */
4679
                switch(rm) {
4680
                case 0: /* f2xm1 */
4681
                    gen_op_f2xm1();
4682
                    break;
4683
                case 1: /* fyl2x */
4684
                    gen_op_fyl2x();
4685
                    break;
4686
                case 2: /* fptan */
4687
                    gen_op_fptan();
4688
                    break;
4689
                case 3: /* fpatan */
4690
                    gen_op_fpatan();
4691
                    break;
4692
                case 4: /* fxtract */
4693
                    gen_op_fxtract();
4694
                    break;
4695
                case 5: /* fprem1 */
4696
                    gen_op_fprem1();
4697
                    break;
4698
                case 6: /* fdecstp */
4699
                    gen_op_fdecstp();
4700
                    break;
4701
                default:
4702
                case 7: /* fincstp */
4703
                    gen_op_fincstp();
4704
                    break;
4705
                }
4706
                break;
4707
            case 0x0f: /* grp d9/7 */
4708
                switch(rm) {
4709
                case 0: /* fprem */
4710
                    gen_op_fprem();
4711
                    break;
4712
                case 1: /* fyl2xp1 */
4713
                    gen_op_fyl2xp1();
4714
                    break;
4715
                case 2: /* fsqrt */
4716
                    gen_op_fsqrt();
4717
                    break;
4718
                case 3: /* fsincos */
4719
                    gen_op_fsincos();
4720
                    break;
4721
                case 5: /* fscale */
4722
                    gen_op_fscale();
4723
                    break;
4724
                case 4: /* frndint */
4725
                    gen_op_frndint();
4726
                    break;
4727
                case 6: /* fsin */
4728
                    gen_op_fsin();
4729
                    break;
4730
                default:
4731
                case 7: /* fcos */
4732
                    gen_op_fcos();
4733
                    break;
4734
                }
4735
                break;
4736
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4737
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4738
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4739
                {
4740
                    int op1;
4741

    
4742
                    op1 = op & 7;
4743
                    if (op >= 0x20) {
4744
                        gen_op_fp_arith_STN_ST0[op1](opreg);
4745
                        if (op >= 0x30)
4746
                            gen_op_fpop();
4747
                    } else {
4748
                        gen_op_fmov_FT0_STN(opreg);
4749
                        gen_op_fp_arith_ST0_FT0[op1]();
4750
                    }
4751
                }
4752
                break;
4753
            case 0x02: /* fcom */
4754
            case 0x22: /* fcom2, undocumented op */
4755
                gen_op_fmov_FT0_STN(opreg);
4756
                gen_op_fcom_ST0_FT0();
4757
                break;
4758
            case 0x03: /* fcomp */
4759
            case 0x23: /* fcomp3, undocumented op */
4760
            case 0x32: /* fcomp5, undocumented op */
4761
                gen_op_fmov_FT0_STN(opreg);
4762
                gen_op_fcom_ST0_FT0();
4763
                gen_op_fpop();
4764
                break;
4765
            case 0x15: /* da/5 */
4766
                switch(rm) {
4767
                case 1: /* fucompp */
4768
                    gen_op_fmov_FT0_STN(1);
4769
                    gen_op_fucom_ST0_FT0();
4770
                    gen_op_fpop();
4771
                    gen_op_fpop();
4772
                    break;
4773
                default:
4774
                    goto illegal_op;
4775
                }
4776
                break;
4777
            case 0x1c:
4778
                switch(rm) {
4779
                case 0: /* feni (287 only, just do nop here) */
4780
                    break;
4781
                case 1: /* fdisi (287 only, just do nop here) */
4782
                    break;
4783
                case 2: /* fclex */
4784
                    gen_op_fclex();
4785
                    break;
4786
                case 3: /* fninit */
4787
                    gen_op_fninit();
4788
                    break;
4789
                case 4: /* fsetpm (287 only, just do nop here) */
4790
                    break;
4791
                default:
4792
                    goto illegal_op;
4793
                }
4794
                break;
4795
            case 0x1d: /* fucomi */
4796
                if (s->cc_op != CC_OP_DYNAMIC)
4797
                    gen_op_set_cc_op(s->cc_op);
4798
                gen_op_fmov_FT0_STN(opreg);
4799
                gen_op_fucomi_ST0_FT0();
4800
                s->cc_op = CC_OP_EFLAGS;
4801
                break;
4802
            case 0x1e: /* fcomi */
4803
                if (s->cc_op != CC_OP_DYNAMIC)
4804
                    gen_op_set_cc_op(s->cc_op);
4805
                gen_op_fmov_FT0_STN(opreg);
4806
                gen_op_fcomi_ST0_FT0();
4807
                s->cc_op = CC_OP_EFLAGS;
4808
                break;
4809
            case 0x28: /* ffree sti */
4810
                gen_op_ffree_STN(opreg);
4811
                break;
4812
            case 0x2a: /* fst sti */
4813
                gen_op_fmov_STN_ST0(opreg);
4814
                break;
4815
            case 0x2b: /* fstp sti */
4816
            case 0x0b: /* fstp1 sti, undocumented op */
4817
            case 0x3a: /* fstp8 sti, undocumented op */
4818
            case 0x3b: /* fstp9 sti, undocumented op */
4819
                gen_op_fmov_STN_ST0(opreg);
4820
                gen_op_fpop();
4821
                break;
4822
            case 0x2c: /* fucom st(i) */
4823
                gen_op_fmov_FT0_STN(opreg);
4824
                gen_op_fucom_ST0_FT0();
4825
                break;
4826
            case 0x2d: /* fucomp st(i) */
4827
                gen_op_fmov_FT0_STN(opreg);
4828
                gen_op_fucom_ST0_FT0();
4829
                gen_op_fpop();
4830
                break;
4831
            case 0x33: /* de/3 */
4832
                switch(rm) {
4833
                case 1: /* fcompp */
4834
                    gen_op_fmov_FT0_STN(1);
4835
                    gen_op_fcom_ST0_FT0();
4836
                    gen_op_fpop();
4837
                    gen_op_fpop();
4838
                    break;
4839
                default:
4840
                    goto illegal_op;
4841
                }
4842
                break;
4843
            case 0x38: /* ffreep sti, undocumented op */
4844
                gen_op_ffree_STN(opreg);
4845
                gen_op_fpop();
4846
                break;
4847
            case 0x3c: /* df/4 */
4848
                switch(rm) {
4849
                case 0:
4850
                    gen_op_fnstsw_EAX();
4851
                    break;
4852
                default:
4853
                    goto illegal_op;
4854
                }
4855
                break;
4856
            case 0x3d: /* fucomip */
4857
                if (s->cc_op != CC_OP_DYNAMIC)
4858
                    gen_op_set_cc_op(s->cc_op);
4859
                gen_op_fmov_FT0_STN(opreg);
4860
                gen_op_fucomi_ST0_FT0();
4861
                gen_op_fpop();
4862
                s->cc_op = CC_OP_EFLAGS;
4863
                break;
4864
            case 0x3e: /* fcomip */
4865
                if (s->cc_op != CC_OP_DYNAMIC)
4866
                    gen_op_set_cc_op(s->cc_op);
4867
                gen_op_fmov_FT0_STN(opreg);
4868
                gen_op_fcomi_ST0_FT0();
4869
                gen_op_fpop();
4870
                s->cc_op = CC_OP_EFLAGS;
4871
                break;
4872
            case 0x10 ... 0x13: /* fcmovxx */
4873
            case 0x18 ... 0x1b:
4874
                {
4875
                    int op1;
4876
                    const static uint8_t fcmov_cc[8] = {
4877
                        (JCC_B << 1),
4878
                        (JCC_Z << 1),
4879
                        (JCC_BE << 1),
4880
                        (JCC_P << 1),
4881
                    };
4882
                    op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
4883
                    gen_setcc(s, op1);
4884
                    gen_op_fcmov_ST0_STN_T0(opreg);
4885
                }
4886
                break;
4887
            default:
4888
                goto illegal_op;
4889
            }
4890
        }
4891
        break;
4892
        /************************/
4893
        /* string ops */
4894

    
4895
    case 0xa4: /* movsS */
4896
    case 0xa5:
4897
        if ((b & 1) == 0)
4898
            ot = OT_BYTE;
4899
        else
4900
            ot = dflag + OT_WORD;
4901

    
4902
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4903
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4904
        } else {
4905
            gen_movs(s, ot);
4906
        }
4907
        break;
4908

    
4909
    case 0xaa: /* stosS */
4910
    case 0xab:
4911
        if ((b & 1) == 0)
4912
            ot = OT_BYTE;
4913
        else
4914
            ot = dflag + OT_WORD;
4915

    
4916
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4917
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4918
        } else {
4919
            gen_stos(s, ot);
4920
        }
4921
        break;
4922
    case 0xac: /* lodsS */
4923
    case 0xad:
4924
        if ((b & 1) == 0)
4925
            ot = OT_BYTE;
4926
        else
4927
            ot = dflag + OT_WORD;
4928
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4929
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4930
        } else {
4931
            gen_lods(s, ot);
4932
        }
4933
        break;
4934
    case 0xae: /* scasS */
4935
    case 0xaf:
4936
        if ((b & 1) == 0)
4937
            ot = OT_BYTE;
4938
        else
4939
            ot = dflag + OT_WORD;
4940
        if (prefixes & PREFIX_REPNZ) {
4941
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4942
        } else if (prefixes & PREFIX_REPZ) {
4943
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4944
        } else {
4945
            gen_scas(s, ot);
4946
            s->cc_op = CC_OP_SUBB + ot;
4947
        }
4948
        break;
4949

    
4950
    case 0xa6: /* cmpsS */
4951
    case 0xa7:
4952
        if ((b & 1) == 0)
4953
            ot = OT_BYTE;
4954
        else
4955
            ot = dflag + OT_WORD;
4956
        if (prefixes & PREFIX_REPNZ) {
4957
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4958
        } else if (prefixes & PREFIX_REPZ) {
4959
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4960
        } else {
4961
            gen_cmps(s, ot);
4962
            s->cc_op = CC_OP_SUBB + ot;
4963
        }
4964
        break;
4965
    case 0x6c: /* insS */
4966
    case 0x6d:
4967
        if ((b & 1) == 0)
4968
            ot = OT_BYTE;
4969
        else
4970
            ot = dflag ? OT_LONG : OT_WORD;
4971
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
4972
        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4973
        gen_op_andl_T0_ffff();
4974
        if (gen_svm_check_io(s, pc_start,
4975
                             SVM_IOIO_TYPE_MASK | (1 << (4+ot)) |
4976
                             svm_is_rep(prefixes) | 4 | (1 << (7+s->aflag))))
4977
            break;
4978
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4979
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4980
        } else {
4981
            gen_ins(s, ot);
4982
        }
4983
        break;
4984
    case 0x6e: /* outsS */
4985
    case 0x6f:
4986
        if ((b & 1) == 0)
4987
            ot = OT_BYTE;
4988
        else
4989
            ot = dflag ? OT_LONG : OT_WORD;
4990
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
4991
        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4992
        gen_op_andl_T0_ffff();
4993
        if (gen_svm_check_io(s, pc_start,
4994
                             (1 << (4+ot)) | svm_is_rep(prefixes) |
4995
                             4 | (1 << (7+s->aflag))))
4996
            break;
4997
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4998
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4999
        } else {
5000
            gen_outs(s, ot);
5001
        }
5002
        break;
5003

    
5004
        /************************/
5005
        /* port I/O */
5006

    
5007
    case 0xe4:
5008
    case 0xe5:
5009
        if ((b & 1) == 0)
5010
            ot = OT_BYTE;
5011
        else
5012
            ot = dflag ? OT_LONG : OT_WORD;
5013
        val = ldub_code(s->pc++);
5014
        gen_op_movl_T0_im(val);
5015
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
5016
        if (gen_svm_check_io(s, pc_start,
5017
                             SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
5018
                             (1 << (4+ot))))
5019
            break;
5020
        gen_op_in[ot]();
5021
        gen_op_mov_reg_T1[ot][R_EAX]();
5022
        break;
5023
    case 0xe6:
5024
    case 0xe7:
5025
        if ((b & 1) == 0)
5026
            ot = OT_BYTE;
5027
        else
5028
            ot = dflag ? OT_LONG : OT_WORD;
5029
        val = ldub_code(s->pc++);
5030
        gen_op_movl_T0_im(val);
5031
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
5032
        if (gen_svm_check_io(s, pc_start, svm_is_rep(prefixes) |
5033
                             (1 << (4+ot))))
5034
            break;
5035
        gen_op_mov_TN_reg[ot][1][R_EAX]();
5036
        gen_op_out[ot]();
5037
        break;
5038
    case 0xec:
5039
    case 0xed:
5040
        if ((b & 1) == 0)
5041
            ot = OT_BYTE;
5042
        else
5043
            ot = dflag ? OT_LONG : OT_WORD;
5044
        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
5045
        gen_op_andl_T0_ffff();
5046
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
5047
        if (gen_svm_check_io(s, pc_start,
5048
                             SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
5049
                             (1 << (4+ot))))
5050
            break;
5051
        gen_op_in[ot]();
5052
        gen_op_mov_reg_T1[ot][R_EAX]();
5053
        break;
5054
    case 0xee:
5055
    case 0xef:
5056
        if ((b & 1) == 0)
5057
            ot = OT_BYTE;
5058
        else
5059
            ot = dflag ? OT_LONG : OT_WORD;
5060
        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
5061
        gen_op_andl_T0_ffff();
5062
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
5063
        if (gen_svm_check_io(s, pc_start,
5064
                             svm_is_rep(prefixes) | (1 << (4+ot))))
5065
            break;
5066
        gen_op_mov_TN_reg[ot][1][R_EAX]();
5067
        gen_op_out[ot]();
5068
        break;
5069

    
5070
        /************************/
5071
        /* control */
5072
    case 0xc2: /* ret im */
5073
        val = ldsw_code(s->pc);
5074
        s->pc += 2;
5075
        gen_pop_T0(s);
5076
        if (CODE64(s) && s->dflag)
5077
            s->dflag = 2;
5078
        gen_stack_update(s, val + (2 << s->dflag));
5079
        if (s->dflag == 0)
5080
            gen_op_andl_T0_ffff();
5081
        gen_op_jmp_T0();
5082
        gen_eob(s);
5083
        break;
5084
    case 0xc3: /* ret */
5085
        gen_pop_T0(s);
5086
        gen_pop_update(s);
5087
        if (s->dflag == 0)
5088
            gen_op_andl_T0_ffff();
5089
        gen_op_jmp_T0();
5090
        gen_eob(s);
5091
        break;
5092
    case 0xca: /* lret im */
5093
        val = ldsw_code(s->pc);
5094
        s->pc += 2;
5095
    do_lret:
5096
        if (s->pe && !s->vm86) {
5097
            if (s->cc_op != CC_OP_DYNAMIC)
5098
                gen_op_set_cc_op(s->cc_op);
5099
            gen_jmp_im(pc_start - s->cs_base);
5100
            gen_op_lret_protected(s->dflag, val);
5101
        } else {
5102
            gen_stack_A0(s);
5103
            /* pop offset */
5104
            gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
5105
            if (s->dflag == 0)
5106
                gen_op_andl_T0_ffff();
5107
            /* NOTE: keeping EIP updated is not a problem in case of
5108
               exception */
5109
            gen_op_jmp_T0();
5110
            /* pop selector */
5111
            gen_op_addl_A0_im(2 << s->dflag);
5112
            gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
5113
            gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5114
            /* add stack offset */
5115
            gen_stack_update(s, val + (4 << s->dflag));
5116
        }
5117
        gen_eob(s);
5118
        break;
5119
    case 0xcb: /* lret */
5120
        val = 0;
5121
        goto do_lret;
5122
    case 0xcf: /* iret */
5123
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5124
            break;
5125
        if (!s->pe) {
5126
            /* real mode */
5127
            gen_op_iret_real(s->dflag);
5128
            s->cc_op = CC_OP_EFLAGS;
5129
        } else if (s->vm86) {
5130
            if (s->iopl != 3) {
5131
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5132
            } else {
5133
                gen_op_iret_real(s->dflag);
5134
                s->cc_op = CC_OP_EFLAGS;
5135
            }
5136
        } else {
5137
            if (s->cc_op != CC_OP_DYNAMIC)
5138
                gen_op_set_cc_op(s->cc_op);
5139
            gen_jmp_im(pc_start - s->cs_base);
5140
            gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
5141
            s->cc_op = CC_OP_EFLAGS;
5142
        }
5143
        gen_eob(s);
5144
        break;
5145
    case 0xe8: /* call im */
5146
        {
5147
            if (dflag)
5148
                tval = (int32_t)insn_get(s, OT_LONG);
5149
            else
5150
                tval = (int16_t)insn_get(s, OT_WORD);
5151
            next_eip = s->pc - s->cs_base;
5152
            tval += next_eip;
5153
            if (s->dflag == 0)
5154
                tval &= 0xffff;
5155
            gen_movtl_T0_im(next_eip);
5156
            gen_push_T0(s);
5157
            gen_jmp(s, tval);
5158
        }
5159
        break;
5160
    case 0x9a: /* lcall im */
5161
        {
5162
            unsigned int selector, offset;
5163

    
5164
            if (CODE64(s))
5165
                goto illegal_op;
5166
            ot = dflag ? OT_LONG : OT_WORD;
5167
            offset = insn_get(s, ot);
5168
            selector = insn_get(s, OT_WORD);
5169

    
5170
            gen_op_movl_T0_im(selector);
5171
            gen_op_movl_T1_imu(offset);
5172
        }
5173
        goto do_lcall;
5174
    case 0xe9: /* jmp im */
5175
        if (dflag)
5176
            tval = (int32_t)insn_get(s, OT_LONG);
5177
        else
5178
            tval = (int16_t)insn_get(s, OT_WORD);
5179
        tval += s->pc - s->cs_base;
5180
        if (s->dflag == 0)
5181
            tval &= 0xffff;
5182
        gen_jmp(s, tval);
5183
        break;
5184
    case 0xea: /* ljmp im */
5185
        {
5186
            unsigned int selector, offset;
5187

    
5188
            if (CODE64(s))
5189
                goto illegal_op;
5190
            ot = dflag ? OT_LONG : OT_WORD;
5191
            offset = insn_get(s, ot);
5192
            selector = insn_get(s, OT_WORD);
5193

    
5194
            gen_op_movl_T0_im(selector);
5195
            gen_op_movl_T1_imu(offset);
5196
        }
5197
        goto do_ljmp;
5198
    case 0xeb: /* jmp Jb */
5199
        tval = (int8_t)insn_get(s, OT_BYTE);
5200
        tval += s->pc - s->cs_base;
5201
        if (s->dflag == 0)
5202
            tval &= 0xffff;
5203
        gen_jmp(s, tval);
5204
        break;
5205
    case 0x70 ... 0x7f: /* jcc Jb */
5206
        tval = (int8_t)insn_get(s, OT_BYTE);
5207
        goto do_jcc;
5208
    case 0x180 ... 0x18f: /* jcc Jv */
5209
        if (dflag) {
5210
            tval = (int32_t)insn_get(s, OT_LONG);
5211
        } else {
5212
            tval = (int16_t)insn_get(s, OT_WORD);
5213
        }
5214
    do_jcc:
5215
        next_eip = s->pc - s->cs_base;
5216
        tval += next_eip;
5217
        if (s->dflag == 0)
5218
            tval &= 0xffff;
5219
        gen_jcc(s, b, tval, next_eip);
5220
        break;
5221

    
5222
    case 0x190 ... 0x19f: /* setcc Gv */
5223
        modrm = ldub_code(s->pc++);
5224
        gen_setcc(s, b);
5225
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5226
        break;
5227
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
5228
        ot = dflag + OT_WORD;
5229
        modrm = ldub_code(s->pc++);
5230
        reg = ((modrm >> 3) & 7) | rex_r;
5231
        mod = (modrm >> 6) & 3;
5232
        gen_setcc(s, b);
5233
        if (mod != 3) {
5234
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5235
            gen_op_ld_T1_A0[ot + s->mem_index]();
5236
        } else {
5237
            rm = (modrm & 7) | REX_B(s);
5238
            gen_op_mov_TN_reg[ot][1][rm]();
5239
        }
5240
        gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5241
        break;
5242

    
5243
        /************************/
5244
        /* flags */
5245
    case 0x9c: /* pushf */
5246
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5247
            break;
5248
        if (s->vm86 && s->iopl != 3) {
5249
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5250
        } else {
5251
            if (s->cc_op != CC_OP_DYNAMIC)
5252
                gen_op_set_cc_op(s->cc_op);
5253
            gen_op_movl_T0_eflags();
5254
            gen_push_T0(s);
5255
        }
5256
        break;
5257
    case 0x9d: /* popf */
5258
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5259
            break;
5260
        if (s->vm86 && s->iopl != 3) {
5261
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5262
        } else {
5263
            gen_pop_T0(s);
5264
            if (s->cpl == 0) {
5265
                if (s->dflag) {
5266
                    gen_op_movl_eflags_T0_cpl0();
5267
                } else {
5268
                    gen_op_movw_eflags_T0_cpl0();
5269
                }
5270
            } else {
5271
                if (s->cpl <= s->iopl) {
5272
                    if (s->dflag) {
5273
                        gen_op_movl_eflags_T0_io();
5274
                    } else {
5275
                        gen_op_movw_eflags_T0_io();
5276
                    }
5277
                } else {
5278
                    if (s->dflag) {
5279
                        gen_op_movl_eflags_T0();
5280
                    } else {
5281
                        gen_op_movw_eflags_T0();
5282
                    }
5283
                }
5284
            }
5285
            gen_pop_update(s);
5286
            s->cc_op = CC_OP_EFLAGS;
5287
            /* abort translation because TF flag may change */
5288
            gen_jmp_im(s->pc - s->cs_base);
5289
            gen_eob(s);
5290
        }
5291
        break;
5292
    case 0x9e: /* sahf */
5293
        if (CODE64(s))
5294
            goto illegal_op;
5295
        gen_op_mov_TN_reg[OT_BYTE][0][R_AH]();
5296
        if (s->cc_op != CC_OP_DYNAMIC)
5297
            gen_op_set_cc_op(s->cc_op);
5298
        gen_op_movb_eflags_T0();
5299
        s->cc_op = CC_OP_EFLAGS;
5300
        break;
5301
    case 0x9f: /* lahf */
5302
        if (CODE64(s))
5303
            goto illegal_op;
5304
        if (s->cc_op != CC_OP_DYNAMIC)
5305
            gen_op_set_cc_op(s->cc_op);
5306
        gen_op_movl_T0_eflags();
5307
        gen_op_mov_reg_T0[OT_BYTE][R_AH]();
5308
        break;
5309
    case 0xf5: /* cmc */
5310
        if (s->cc_op != CC_OP_DYNAMIC)
5311
            gen_op_set_cc_op(s->cc_op);
5312
        gen_op_cmc();
5313
        s->cc_op = CC_OP_EFLAGS;
5314
        break;
5315
    case 0xf8: /* clc */
5316
        if (s->cc_op != CC_OP_DYNAMIC)
5317
            gen_op_set_cc_op(s->cc_op);
5318
        gen_op_clc();
5319
        s->cc_op = CC_OP_EFLAGS;
5320
        break;
5321
    case 0xf9: /* stc */
5322
        if (s->cc_op != CC_OP_DYNAMIC)
5323
            gen_op_set_cc_op(s->cc_op);
5324
        gen_op_stc();
5325
        s->cc_op = CC_OP_EFLAGS;
5326
        break;
5327
    case 0xfc: /* cld */
5328
        gen_op_cld();
5329
        break;
5330
    case 0xfd: /* std */
5331
        gen_op_std();
5332
        break;
5333

    
5334
        /************************/
5335
        /* bit operations */
5336
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
5337
        ot = dflag + OT_WORD;
5338
        modrm = ldub_code(s->pc++);
5339
        op = (modrm >> 3) & 7;
5340
        mod = (modrm >> 6) & 3;
5341
        rm = (modrm & 7) | REX_B(s);
5342
        if (mod != 3) {
5343
            s->rip_offset = 1;
5344
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5345
            gen_op_ld_T0_A0[ot + s->mem_index]();
5346
        } else {
5347
            gen_op_mov_TN_reg[ot][0][rm]();
5348
        }
5349
        /* load shift */
5350
        val = ldub_code(s->pc++);
5351
        gen_op_movl_T1_im(val);
5352
        if (op < 4)
5353
            goto illegal_op;
5354
        op -= 4;
5355
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5356
        s->cc_op = CC_OP_SARB + ot;
5357
        if (op != 0) {
5358
            if (mod != 3)
5359
                gen_op_st_T0_A0[ot + s->mem_index]();
5360
            else
5361
                gen_op_mov_reg_T0[ot][rm]();
5362
            gen_op_update_bt_cc();
5363
        }
5364
        break;
5365
    case 0x1a3: /* bt Gv, Ev */
5366
        op = 0;
5367
        goto do_btx;
5368
    case 0x1ab: /* bts */
5369
        op = 1;
5370
        goto do_btx;
5371
    case 0x1b3: /* btr */
5372
        op = 2;
5373
        goto do_btx;
5374
    case 0x1bb: /* btc */
5375
        op = 3;
5376
    do_btx:
5377
        ot = dflag + OT_WORD;
5378
        modrm = ldub_code(s->pc++);
5379
        reg = ((modrm >> 3) & 7) | rex_r;
5380
        mod = (modrm >> 6) & 3;
5381
        rm = (modrm & 7) | REX_B(s);
5382
        gen_op_mov_TN_reg[OT_LONG][1][reg]();
5383
        if (mod != 3) {
5384
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5385
            /* specific case: we need to add a displacement */
5386
            gen_op_add_bit_A0_T1[ot - OT_WORD]();
5387
            gen_op_ld_T0_A0[ot + s->mem_index]();
5388
        } else {
5389
            gen_op_mov_TN_reg[ot][0][rm]();
5390
        }
5391
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5392
        s->cc_op = CC_OP_SARB + ot;
5393
        if (op != 0) {
5394
            if (mod != 3)
5395
                gen_op_st_T0_A0[ot + s->mem_index]();
5396
            else
5397
                gen_op_mov_reg_T0[ot][rm]();
5398
            gen_op_update_bt_cc();
5399
        }
5400
        break;
5401
    case 0x1bc: /* bsf */
5402
    case 0x1bd: /* bsr */
5403
        ot = dflag + OT_WORD;
5404
        modrm = ldub_code(s->pc++);
5405
        reg = ((modrm >> 3) & 7) | rex_r;
5406
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5407
        /* NOTE: in order to handle the 0 case, we must load the
5408
           result. It could be optimized with a generated jump */
5409
        gen_op_mov_TN_reg[ot][1][reg]();
5410
        gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5411
        gen_op_mov_reg_T1[ot][reg]();
5412
        s->cc_op = CC_OP_LOGICB + ot;
5413
        break;
5414
        /************************/
5415
        /* bcd */
5416
    case 0x27: /* daa */
5417
        if (CODE64(s))
5418
            goto illegal_op;
5419
        if (s->cc_op != CC_OP_DYNAMIC)
5420
            gen_op_set_cc_op(s->cc_op);
5421
        gen_op_daa();
5422
        s->cc_op = CC_OP_EFLAGS;
5423
        break;
5424
    case 0x2f: /* das */
5425
        if (CODE64(s))
5426
            goto illegal_op;
5427
        if (s->cc_op != CC_OP_DYNAMIC)
5428
            gen_op_set_cc_op(s->cc_op);
5429
        gen_op_das();
5430
        s->cc_op = CC_OP_EFLAGS;
5431
        break;
5432
    case 0x37: /* aaa */
5433
        if (CODE64(s))
5434
            goto illegal_op;
5435
        if (s->cc_op != CC_OP_DYNAMIC)
5436
            gen_op_set_cc_op(s->cc_op);
5437
        gen_op_aaa();
5438
        s->cc_op = CC_OP_EFLAGS;
5439
        break;
5440
    case 0x3f: /* aas */
5441
        if (CODE64(s))
5442
            goto illegal_op;
5443
        if (s->cc_op != CC_OP_DYNAMIC)
5444
            gen_op_set_cc_op(s->cc_op);
5445
        gen_op_aas();
5446
        s->cc_op = CC_OP_EFLAGS;
5447
        break;
5448
    case 0xd4: /* aam */
5449
        if (CODE64(s))
5450
            goto illegal_op;
5451
        val = ldub_code(s->pc++);
5452
        if (val == 0) {
5453
            gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5454
        } else {
5455
            gen_op_aam(val);
5456
            s->cc_op = CC_OP_LOGICB;
5457
        }
5458
        break;
5459
    case 0xd5: /* aad */
5460
        if (CODE64(s))
5461
            goto illegal_op;
5462
        val = ldub_code(s->pc++);
5463
        gen_op_aad(val);
5464
        s->cc_op = CC_OP_LOGICB;
5465
        break;
5466
        /************************/
5467
        /* misc */
5468
    case 0x90: /* nop */
5469
        /* XXX: xchg + rex handling */
5470
        /* XXX: correct lock test for all insn */
5471
        if (prefixes & PREFIX_LOCK)
5472
            goto illegal_op;
5473
        if (prefixes & PREFIX_REPZ) {
5474
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
5475
        }
5476
        break;
5477
    case 0x9b: /* fwait */
5478
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5479
            (HF_MP_MASK | HF_TS_MASK)) {
5480
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5481
        } else {
5482
            if (s->cc_op != CC_OP_DYNAMIC)
5483
                gen_op_set_cc_op(s->cc_op);
5484
            gen_jmp_im(pc_start - s->cs_base);
5485
            gen_op_fwait();
5486
        }
5487
        break;
5488
    case 0xcc: /* int3 */
5489
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5490
            break;
5491
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5492
        break;
5493
    case 0xcd: /* int N */
5494
        val = ldub_code(s->pc++);
5495
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5496
            break;
5497
        if (s->vm86 && s->iopl != 3) {
5498
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5499
        } else {
5500
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5501
        }
5502
        break;
5503
    case 0xce: /* into */
5504
        if (CODE64(s))
5505
            goto illegal_op;
5506
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5507
            break;
5508
        if (s->cc_op != CC_OP_DYNAMIC)
5509
            gen_op_set_cc_op(s->cc_op);
5510
        gen_jmp_im(pc_start - s->cs_base);
5511
        gen_op_into(s->pc - pc_start);
5512
        break;
5513
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
5514
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
5515
            break;
5516
#if 1
5517
        gen_debug(s, pc_start - s->cs_base);
5518
#else
5519
        /* start debug */
5520
        tb_flush(cpu_single_env);
5521
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5522
#endif
5523
        break;
5524
    case 0xfa: /* cli */
5525
        if (!s->vm86) {
5526
            if (s->cpl <= s->iopl) {
5527
                gen_op_cli();
5528
            } else {
5529
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5530
            }
5531
        } else {
5532
            if (s->iopl == 3) {
5533
                gen_op_cli();
5534
            } else {
5535
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5536
            }
5537
        }
5538
        break;
5539
    case 0xfb: /* sti */
5540
        if (!s->vm86) {
5541
            if (s->cpl <= s->iopl) {
5542
            gen_sti:
5543
                gen_op_sti();
5544
                /* interruptions are enabled only the first insn after sti */
5545
                /* If several instructions disable interrupts, only the
5546
                   _first_ does it */
5547
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5548
                    gen_op_set_inhibit_irq();
5549
                /* give a chance to handle pending irqs */
5550
                gen_jmp_im(s->pc - s->cs_base);
5551
                gen_eob(s);
5552
            } else {
5553
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5554
            }
5555
        } else {
5556
            if (s->iopl == 3) {
5557
                goto gen_sti;
5558
            } else {
5559
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5560
            }
5561
        }
5562
        break;
5563
    case 0x62: /* bound */
5564
        if (CODE64(s))
5565
            goto illegal_op;
5566
        ot = dflag ? OT_LONG : OT_WORD;
5567
        modrm = ldub_code(s->pc++);
5568
        reg = (modrm >> 3) & 7;
5569
        mod = (modrm >> 6) & 3;
5570
        if (mod == 3)
5571
            goto illegal_op;
5572
        gen_op_mov_TN_reg[ot][0][reg]();
5573
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5574
        gen_jmp_im(pc_start - s->cs_base);
5575
        if (ot == OT_WORD)
5576
            gen_op_boundw();
5577
        else
5578
            gen_op_boundl();
5579
        break;
5580
    case 0x1c8 ... 0x1cf: /* bswap reg */
5581
        reg = (b & 7) | REX_B(s);
5582
#ifdef TARGET_X86_64
5583
        if (dflag == 2) {
5584
            gen_op_mov_TN_reg[OT_QUAD][0][reg]();
5585
            gen_op_bswapq_T0();
5586
            gen_op_mov_reg_T0[OT_QUAD][reg]();
5587
        } else
5588
#endif
5589
        {
5590
            gen_op_mov_TN_reg[OT_LONG][0][reg]();
5591
            gen_op_bswapl_T0();
5592
            gen_op_mov_reg_T0[OT_LONG][reg]();
5593
        }
5594
        break;
5595
    case 0xd6: /* salc */
5596
        if (CODE64(s))
5597
            goto illegal_op;
5598
        if (s->cc_op != CC_OP_DYNAMIC)
5599
            gen_op_set_cc_op(s->cc_op);
5600
        gen_op_salc();
5601
        break;
5602
    case 0xe0: /* loopnz */
5603
    case 0xe1: /* loopz */
5604
        if (s->cc_op != CC_OP_DYNAMIC)
5605
            gen_op_set_cc_op(s->cc_op);
5606
        /* FALL THRU */
5607
    case 0xe2: /* loop */
5608
    case 0xe3: /* jecxz */
5609
        {
5610
            int l1, l2;
5611

    
5612
            tval = (int8_t)insn_get(s, OT_BYTE);
5613
            next_eip = s->pc - s->cs_base;
5614
            tval += next_eip;
5615
            if (s->dflag == 0)
5616
                tval &= 0xffff;
5617

    
5618
            l1 = gen_new_label();
5619
            l2 = gen_new_label();
5620
            b &= 3;
5621
            if (b == 3) {
5622
                gen_op_jz_ecx[s->aflag](l1);
5623
            } else {
5624
                gen_op_dec_ECX[s->aflag]();
5625
                if (b <= 1)
5626
                    gen_op_mov_T0_cc();
5627
                gen_op_loop[s->aflag][b](l1);
5628
            }
5629

    
5630
            gen_jmp_im(next_eip);
5631
            gen_op_jmp_label(l2);
5632
            gen_set_label(l1);
5633
            gen_jmp_im(tval);
5634
            gen_set_label(l2);
5635
            gen_eob(s);
5636
        }
5637
        break;
5638
    case 0x130: /* wrmsr */
5639
    case 0x132: /* rdmsr */
5640
        if (s->cpl != 0) {
5641
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5642
        } else {
5643
            int retval = 0;
5644
            if (b & 2) {
5645
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
5646
                gen_op_rdmsr();
5647
            } else {
5648
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
5649
                gen_op_wrmsr();
5650
            }
5651
            if(retval)
5652
                gen_eob(s);
5653
        }
5654
        break;
5655
    case 0x131: /* rdtsc */
5656
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
5657
            break;
5658
        gen_jmp_im(pc_start - s->cs_base);
5659
        gen_op_rdtsc();
5660
        break;
5661
    case 0x134: /* sysenter */
5662
        if (CODE64(s))
5663
            goto illegal_op;
5664
        if (!s->pe) {
5665
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5666
        } else {
5667
            if (s->cc_op != CC_OP_DYNAMIC) {
5668
                gen_op_set_cc_op(s->cc_op);
5669
                s->cc_op = CC_OP_DYNAMIC;
5670
            }
5671
            gen_jmp_im(pc_start - s->cs_base);
5672
            gen_op_sysenter();
5673
            gen_eob(s);
5674
        }
5675
        break;
5676
    case 0x135: /* sysexit */
5677
        if (CODE64(s))
5678
            goto illegal_op;
5679
        if (!s->pe) {
5680
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5681
        } else {
5682
            if (s->cc_op != CC_OP_DYNAMIC) {
5683
                gen_op_set_cc_op(s->cc_op);
5684
                s->cc_op = CC_OP_DYNAMIC;
5685
            }
5686
            gen_jmp_im(pc_start - s->cs_base);
5687
            gen_op_sysexit();
5688
            gen_eob(s);
5689
        }
5690
        break;
5691
#ifdef TARGET_X86_64
5692
    case 0x105: /* syscall */
5693
        /* XXX: is it usable in real mode ? */
5694
        if (s->cc_op != CC_OP_DYNAMIC) {
5695
            gen_op_set_cc_op(s->cc_op);
5696
            s->cc_op = CC_OP_DYNAMIC;
5697
        }
5698
        gen_jmp_im(pc_start - s->cs_base);
5699
        gen_op_syscall(s->pc - pc_start);
5700
        gen_eob(s);
5701
        break;
5702
    case 0x107: /* sysret */
5703
        if (!s->pe) {
5704
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5705
        } else {
5706
            if (s->cc_op != CC_OP_DYNAMIC) {
5707
                gen_op_set_cc_op(s->cc_op);
5708
                s->cc_op = CC_OP_DYNAMIC;
5709
            }
5710
            gen_jmp_im(pc_start - s->cs_base);
5711
            gen_op_sysret(s->dflag);
5712
            /* condition codes are modified only in long mode */
5713
            if (s->lma)
5714
                s->cc_op = CC_OP_EFLAGS;
5715
            gen_eob(s);
5716
        }
5717
        break;
5718
#endif
5719
    case 0x1a2: /* cpuid */
5720
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
5721
            break;
5722
        gen_op_cpuid();
5723
        break;
5724
    case 0xf4: /* hlt */
5725
        if (s->cpl != 0) {
5726
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5727
        } else {
5728
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
5729
                break;
5730
            if (s->cc_op != CC_OP_DYNAMIC)
5731
                gen_op_set_cc_op(s->cc_op);
5732
            gen_jmp_im(s->pc - s->cs_base);
5733
            gen_op_hlt();
5734
            s->is_jmp = 3;
5735
        }
5736
        break;
5737
    case 0x100:
5738
        modrm = ldub_code(s->pc++);
5739
        mod = (modrm >> 6) & 3;
5740
        op = (modrm >> 3) & 7;
5741
        switch(op) {
5742
        case 0: /* sldt */
5743
            if (!s->pe || s->vm86)
5744
                goto illegal_op;
5745
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
5746
                break;
5747
            gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5748
            ot = OT_WORD;
5749
            if (mod == 3)
5750
                ot += s->dflag;
5751
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5752
            break;
5753
        case 2: /* lldt */
5754
            if (!s->pe || s->vm86)
5755
                goto illegal_op;
5756
            if (s->cpl != 0) {
5757
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5758
            } else {
5759
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
5760
                    break;
5761
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5762
                gen_jmp_im(pc_start - s->cs_base);
5763
                gen_op_lldt_T0();
5764
            }
5765
            break;
5766
        case 1: /* str */
5767
            if (!s->pe || s->vm86)
5768
                goto illegal_op;
5769
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
5770
                break;
5771
            gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5772
            ot = OT_WORD;
5773
            if (mod == 3)
5774
                ot += s->dflag;
5775
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5776
            break;
5777
        case 3: /* ltr */
5778
            if (!s->pe || s->vm86)
5779
                goto illegal_op;
5780
            if (s->cpl != 0) {
5781
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5782
            } else {
5783
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
5784
                    break;
5785
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5786
                gen_jmp_im(pc_start - s->cs_base);
5787
                gen_op_ltr_T0();
5788
            }
5789
            break;
5790
        case 4: /* verr */
5791
        case 5: /* verw */
5792
            if (!s->pe || s->vm86)
5793
                goto illegal_op;
5794
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5795
            if (s->cc_op != CC_OP_DYNAMIC)
5796
                gen_op_set_cc_op(s->cc_op);
5797
            if (op == 4)
5798
                gen_op_verr();
5799
            else
5800
                gen_op_verw();
5801
            s->cc_op = CC_OP_EFLAGS;
5802
            break;
5803
        default:
5804
            goto illegal_op;
5805
        }
5806
        break;
5807
    case 0x101:
5808
        modrm = ldub_code(s->pc++);
5809
        mod = (modrm >> 6) & 3;
5810
        op = (modrm >> 3) & 7;
5811
        rm = modrm & 7;
5812
        switch(op) {
5813
        case 0: /* sgdt */
5814
            if (mod == 3)
5815
                goto illegal_op;
5816
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
5817
                break;
5818
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5819
            gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5820
            gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5821
            gen_add_A0_im(s, 2);
5822
            gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5823
            if (!s->dflag)
5824
                gen_op_andl_T0_im(0xffffff);
5825
            gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5826
            break;
5827
        case 1:
5828
            if (mod == 3) {
5829
                switch (rm) {
5830
                case 0: /* monitor */
5831
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5832
                        s->cpl != 0)
5833
                        goto illegal_op;
5834
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
5835
                        break;
5836
                    gen_jmp_im(pc_start - s->cs_base);
5837
#ifdef TARGET_X86_64
5838
                    if (s->aflag == 2) {
5839
                        gen_op_movq_A0_reg[R_EBX]();
5840
                        gen_op_addq_A0_AL();
5841
                    } else
5842
#endif
5843
                    {
5844
                        gen_op_movl_A0_reg[R_EBX]();
5845
                        gen_op_addl_A0_AL();
5846
                        if (s->aflag == 0)
5847
                            gen_op_andl_A0_ffff();
5848
                    }
5849
                    gen_add_A0_ds_seg(s);
5850
                    gen_op_monitor();
5851
                    break;
5852
                case 1: /* mwait */
5853
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5854
                        s->cpl != 0)
5855
                        goto illegal_op;
5856
                    if (s->cc_op != CC_OP_DYNAMIC) {
5857
                        gen_op_set_cc_op(s->cc_op);
5858
                        s->cc_op = CC_OP_DYNAMIC;
5859
                    }
5860
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
5861
                        break;
5862
                    gen_jmp_im(s->pc - s->cs_base);
5863
                    gen_op_mwait();
5864
                    gen_eob(s);
5865
                    break;
5866
                default:
5867
                    goto illegal_op;
5868
                }
5869
            } else { /* sidt */
5870
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
5871
                    break;
5872
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5873
                gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
5874
                gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5875
                gen_add_A0_im(s, 2);
5876
                gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
5877
                if (!s->dflag)
5878
                    gen_op_andl_T0_im(0xffffff);
5879
                gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5880
            }
5881
            break;
5882
        case 2: /* lgdt */
5883
        case 3: /* lidt */
5884
            if (mod == 3) {
5885
                switch(rm) {
5886
                case 0: /* VMRUN */
5887
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
5888
                        break;
5889
                    if (s->cc_op != CC_OP_DYNAMIC)
5890
                        gen_op_set_cc_op(s->cc_op);
5891
                    gen_jmp_im(s->pc - s->cs_base);
5892
                    gen_op_vmrun();
5893
                    s->cc_op = CC_OP_EFLAGS;
5894
                    gen_eob(s);
5895
                    break;
5896
                case 1: /* VMMCALL */
5897
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
5898
                         break;
5899
                    /* FIXME: cause #UD if hflags & SVM */
5900
                    gen_op_vmmcall();
5901
                    break;
5902
                case 2: /* VMLOAD */
5903
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
5904
                         break;
5905
                    gen_op_vmload();
5906
                    break;
5907
                case 3: /* VMSAVE */
5908
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
5909
                         break;
5910
                    gen_op_vmsave();
5911
                    break;
5912
                case 4: /* STGI */
5913
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
5914
                         break;
5915
                    gen_op_stgi();
5916
                    break;
5917
                case 5: /* CLGI */
5918
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
5919
                         break;
5920
                    gen_op_clgi();
5921
                    break;
5922
                case 6: /* SKINIT */
5923
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
5924
                         break;
5925
                    gen_op_skinit();
5926
                    break;
5927
                case 7: /* INVLPGA */
5928
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
5929
                         break;
5930
                    gen_op_invlpga();
5931
                    break;
5932
                default:
5933
                    goto illegal_op;
5934
                }
5935
            } else if (s->cpl != 0) {
5936
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5937
            } else {
5938
                if (gen_svm_check_intercept(s, pc_start,
5939
                                            op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
5940
                    break;
5941
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5942
                gen_op_ld_T1_A0[OT_WORD + s->mem_index]();
5943
                gen_add_A0_im(s, 2);
5944
                gen_op_ld_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5945
                if (!s->dflag)
5946
                    gen_op_andl_T0_im(0xffffff);
5947
                if (op == 2) {
5948
                    gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
5949
                    gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
5950
                } else {
5951
                    gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
5952
                    gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
5953
                }
5954
            }
5955
            break;
5956
        case 4: /* smsw */
5957
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
5958
                break;
5959
            gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
5960
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
5961
            break;
5962
        case 6: /* lmsw */
5963
            if (s->cpl != 0) {
5964
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5965
            } else {
5966
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
5967
                    break;
5968
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5969
                gen_op_lmsw_T0();
5970
                gen_jmp_im(s->pc - s->cs_base);
5971
                gen_eob(s);
5972
            }
5973
            break;
5974
        case 7: /* invlpg */
5975
            if (s->cpl != 0) {
5976
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5977
            } else {
5978
                if (mod == 3) {
5979
#ifdef TARGET_X86_64
5980
                    if (CODE64(s) && rm == 0) {
5981
                        /* swapgs */
5982
                        gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
5983
                        gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
5984
                        gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
5985
                        gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
5986
                    } else
5987
#endif
5988
                    {
5989
                        goto illegal_op;
5990
                    }
5991
                } else {
5992
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
5993
                        break;
5994
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5995
                    gen_op_invlpg_A0();
5996
                    gen_jmp_im(s->pc - s->cs_base);
5997
                    gen_eob(s);
5998
                }
5999
            }
6000
            break;
6001
        default:
6002
            goto illegal_op;
6003
        }
6004
        break;
6005
    case 0x108: /* invd */
6006
    case 0x109: /* wbinvd */
6007
        if (s->cpl != 0) {
6008
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6009
        } else {
6010
            if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6011
                break;
6012
            /* nothing to do */
6013
        }
6014
        break;
6015
    case 0x63: /* arpl or movslS (x86_64) */
6016
#ifdef TARGET_X86_64
6017
        if (CODE64(s)) {
6018
            int d_ot;
6019
            /* d_ot is the size of destination */
6020
            d_ot = dflag + OT_WORD;
6021

    
6022
            modrm = ldub_code(s->pc++);
6023
            reg = ((modrm >> 3) & 7) | rex_r;
6024
            mod = (modrm >> 6) & 3;
6025
            rm = (modrm & 7) | REX_B(s);
6026

    
6027
            if (mod == 3) {
6028
                gen_op_mov_TN_reg[OT_LONG][0][rm]();
6029
                /* sign extend */
6030
                if (d_ot == OT_QUAD)
6031
                    gen_op_movslq_T0_T0();
6032
                gen_op_mov_reg_T0[d_ot][reg]();
6033
            } else {
6034
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6035
                if (d_ot == OT_QUAD) {
6036
                    gen_op_lds_T0_A0[OT_LONG + s->mem_index]();
6037
                } else {
6038
                    gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
6039
                }
6040
                gen_op_mov_reg_T0[d_ot][reg]();
6041
            }
6042
        } else
6043
#endif
6044
        {
6045
            if (!s->pe || s->vm86)
6046
                goto illegal_op;
6047
            ot = dflag ? OT_LONG : OT_WORD;
6048
            modrm = ldub_code(s->pc++);
6049
            reg = (modrm >> 3) & 7;
6050
            mod = (modrm >> 6) & 3;
6051
            rm = modrm & 7;
6052
            if (mod != 3) {
6053
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6054
                gen_op_ld_T0_A0[ot + s->mem_index]();
6055
            } else {
6056
                gen_op_mov_TN_reg[ot][0][rm]();
6057
            }
6058
            if (s->cc_op != CC_OP_DYNAMIC)
6059
                gen_op_set_cc_op(s->cc_op);
6060
            gen_op_arpl();
6061
            s->cc_op = CC_OP_EFLAGS;
6062
            if (mod != 3) {
6063
                gen_op_st_T0_A0[ot + s->mem_index]();
6064
            } else {
6065
                gen_op_mov_reg_T0[ot][rm]();
6066
            }
6067
            gen_op_arpl_update();
6068
        }
6069
        break;
6070
    case 0x102: /* lar */
6071
    case 0x103: /* lsl */
6072
        if (!s->pe || s->vm86)
6073
            goto illegal_op;
6074
        ot = dflag ? OT_LONG : OT_WORD;
6075
        modrm = ldub_code(s->pc++);
6076
        reg = ((modrm >> 3) & 7) | rex_r;
6077
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6078
        gen_op_mov_TN_reg[ot][1][reg]();
6079
        if (s->cc_op != CC_OP_DYNAMIC)
6080
            gen_op_set_cc_op(s->cc_op);
6081
        if (b == 0x102)
6082
            gen_op_lar();
6083
        else
6084
            gen_op_lsl();
6085
        s->cc_op = CC_OP_EFLAGS;
6086
        gen_op_mov_reg_T1[ot][reg]();
6087
        break;
6088
    case 0x118:
6089
        modrm = ldub_code(s->pc++);
6090
        mod = (modrm >> 6) & 3;
6091
        op = (modrm >> 3) & 7;
6092
        switch(op) {
6093
        case 0: /* prefetchnta */
6094
        case 1: /* prefetchnt0 */
6095
        case 2: /* prefetchnt0 */
6096
        case 3: /* prefetchnt0 */
6097
            if (mod == 3)
6098
                goto illegal_op;
6099
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6100
            /* nothing more to do */
6101
            break;
6102
        default: /* nop (multi byte) */
6103
            gen_nop_modrm(s, modrm);
6104
            break;
6105
        }
6106
        break;
6107
    case 0x119 ... 0x11f: /* nop (multi byte) */
6108
        modrm = ldub_code(s->pc++);
6109
        gen_nop_modrm(s, modrm);
6110
        break;
6111
    case 0x120: /* mov reg, crN */
6112
    case 0x122: /* mov crN, reg */
6113
        if (s->cpl != 0) {
6114
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6115
        } else {
6116
            modrm = ldub_code(s->pc++);
6117
            if ((modrm & 0xc0) != 0xc0)
6118
                goto illegal_op;
6119
            rm = (modrm & 7) | REX_B(s);
6120
            reg = ((modrm >> 3) & 7) | rex_r;
6121
            if (CODE64(s))
6122
                ot = OT_QUAD;
6123
            else
6124
                ot = OT_LONG;
6125
            switch(reg) {
6126
            case 0:
6127
            case 2:
6128
            case 3:
6129
            case 4:
6130
            case 8:
6131
                if (b & 2) {
6132
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6133
                    gen_op_mov_TN_reg[ot][0][rm]();
6134
                    gen_op_movl_crN_T0(reg);
6135
                    gen_jmp_im(s->pc - s->cs_base);
6136
                    gen_eob(s);
6137
                } else {
6138
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6139
#if !defined(CONFIG_USER_ONLY)
6140
                    if (reg == 8)
6141
                        gen_op_movtl_T0_cr8();
6142
                    else
6143
#endif
6144
                        gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6145
                    gen_op_mov_reg_T0[ot][rm]();
6146
                }
6147
                break;
6148
            default:
6149
                goto illegal_op;
6150
            }
6151
        }
6152
        break;
6153
    case 0x121: /* mov reg, drN */
6154
    case 0x123: /* mov drN, reg */
6155
        if (s->cpl != 0) {
6156
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6157
        } else {
6158
            modrm = ldub_code(s->pc++);
6159
            if ((modrm & 0xc0) != 0xc0)
6160
                goto illegal_op;
6161
            rm = (modrm & 7) | REX_B(s);
6162
            reg = ((modrm >> 3) & 7) | rex_r;
6163
            if (CODE64(s))
6164
                ot = OT_QUAD;
6165
            else
6166
                ot = OT_LONG;
6167
            /* XXX: do it dynamically with CR4.DE bit */
6168
            if (reg == 4 || reg == 5 || reg >= 8)
6169
                goto illegal_op;
6170
            if (b & 2) {
6171
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6172
                gen_op_mov_TN_reg[ot][0][rm]();
6173
                gen_op_movl_drN_T0(reg);
6174
                gen_jmp_im(s->pc - s->cs_base);
6175
                gen_eob(s);
6176
            } else {
6177
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6178
                gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6179
                gen_op_mov_reg_T0[ot][rm]();
6180
            }
6181
        }
6182
        break;
6183
    case 0x106: /* clts */
6184
        if (s->cpl != 0) {
6185
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6186
        } else {
6187
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6188
            gen_op_clts();
6189
            /* abort block because static cpu state changed */
6190
            gen_jmp_im(s->pc - s->cs_base);
6191
            gen_eob(s);
6192
        }
6193
        break;
6194
    /* MMX/SSE/SSE2/PNI support */
6195
    case 0x1c3: /* MOVNTI reg, mem */
6196
        if (!(s->cpuid_features & CPUID_SSE2))
6197
            goto illegal_op;
6198
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6199
        modrm = ldub_code(s->pc++);
6200
        mod = (modrm >> 6) & 3;
6201
        if (mod == 3)
6202
            goto illegal_op;
6203
        reg = ((modrm >> 3) & 7) | rex_r;
6204
        /* generate a generic store */
6205
        gen_ldst_modrm(s, modrm, ot, reg, 1);
6206
        break;
6207
    case 0x1ae:
6208
        modrm = ldub_code(s->pc++);
6209
        mod = (modrm >> 6) & 3;
6210
        op = (modrm >> 3) & 7;
6211
        switch(op) {
6212
        case 0: /* fxsave */
6213
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6214
                (s->flags & HF_EM_MASK))
6215
                goto illegal_op;
6216
            if (s->flags & HF_TS_MASK) {
6217
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6218
                break;
6219
            }
6220
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6221
            gen_op_fxsave_A0((s->dflag == 2));
6222
            break;
6223
        case 1: /* fxrstor */
6224
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6225
                (s->flags & HF_EM_MASK))
6226
                goto illegal_op;
6227
            if (s->flags & HF_TS_MASK) {
6228
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6229
                break;
6230
            }
6231
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6232
            gen_op_fxrstor_A0((s->dflag == 2));
6233
            break;
6234
        case 2: /* ldmxcsr */
6235
        case 3: /* stmxcsr */
6236
            if (s->flags & HF_TS_MASK) {
6237
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6238
                break;
6239
            }
6240
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6241
                mod == 3)
6242
                goto illegal_op;
6243
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6244
            if (op == 2) {
6245
                gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
6246
                gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6247
            } else {
6248
                gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6249
                gen_op_st_T0_A0[OT_LONG + s->mem_index]();
6250
            }
6251
            break;
6252
        case 5: /* lfence */
6253
        case 6: /* mfence */
6254
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6255
                goto illegal_op;
6256
            break;
6257
        case 7: /* sfence / clflush */
6258
            if ((modrm & 0xc7) == 0xc0) {
6259
                /* sfence */
6260
                if (!(s->cpuid_features & CPUID_SSE))
6261
                    goto illegal_op;
6262
            } else {
6263
                /* clflush */
6264
                if (!(s->cpuid_features & CPUID_CLFLUSH))
6265
                    goto illegal_op;
6266
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6267
            }
6268
            break;
6269
        default:
6270
            goto illegal_op;
6271
        }
6272
        break;
6273
    case 0x10d: /* prefetch */
6274
        modrm = ldub_code(s->pc++);
6275
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6276
        /* ignore for now */
6277
        break;
6278
    case 0x1aa: /* rsm */
6279
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
6280
            break;
6281
        if (!(s->flags & HF_SMM_MASK))
6282
            goto illegal_op;
6283
        if (s->cc_op != CC_OP_DYNAMIC) {
6284
            gen_op_set_cc_op(s->cc_op);
6285
            s->cc_op = CC_OP_DYNAMIC;
6286
        }
6287
        gen_jmp_im(s->pc - s->cs_base);
6288
        gen_op_rsm();
6289
        gen_eob(s);
6290
        break;
6291
    case 0x110 ... 0x117:
6292
    case 0x128 ... 0x12f:
6293
    case 0x150 ... 0x177:
6294
    case 0x17c ... 0x17f:
6295
    case 0x1c2:
6296
    case 0x1c4 ... 0x1c6:
6297
    case 0x1d0 ... 0x1fe:
6298
        gen_sse(s, b, pc_start, rex_r);
6299
        break;
6300
    default:
6301
        goto illegal_op;
6302
    }
6303
    /* lock generation */
6304
    if (s->prefix & PREFIX_LOCK)
6305
        gen_op_unlock();
6306
    return s->pc;
6307
 illegal_op:
6308
    if (s->prefix & PREFIX_LOCK)
6309
        gen_op_unlock();
6310
    /* XXX: ensure that no lock was generated */
6311
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6312
    return s->pc;
6313
}
6314

    
6315
#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
6316
#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
6317

    
6318
/* flags read by an operation */
6319
static uint16_t opc_read_flags[NB_OPS] = {
6320
    [INDEX_op_aas] = CC_A,
6321
    [INDEX_op_aaa] = CC_A,
6322
    [INDEX_op_das] = CC_A | CC_C,
6323
    [INDEX_op_daa] = CC_A | CC_C,
6324

    
6325
    /* subtle: due to the incl/decl implementation, C is used */
6326
    [INDEX_op_update_inc_cc] = CC_C,
6327

    
6328
    [INDEX_op_into] = CC_O,
6329

    
6330
    [INDEX_op_jb_subb] = CC_C,
6331
    [INDEX_op_jb_subw] = CC_C,
6332
    [INDEX_op_jb_subl] = CC_C,
6333

    
6334
    [INDEX_op_jz_subb] = CC_Z,
6335
    [INDEX_op_jz_subw] = CC_Z,
6336
    [INDEX_op_jz_subl] = CC_Z,
6337

    
6338
    [INDEX_op_jbe_subb] = CC_Z | CC_C,
6339
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
6340
    [INDEX_op_jbe_subl] = CC_Z | CC_C,
6341

    
6342
    [INDEX_op_js_subb] = CC_S,
6343
    [INDEX_op_js_subw] = CC_S,
6344
    [INDEX_op_js_subl] = CC_S,
6345

    
6346
    [INDEX_op_jl_subb] = CC_O | CC_S,
6347
    [INDEX_op_jl_subw] = CC_O | CC_S,
6348
    [INDEX_op_jl_subl] = CC_O | CC_S,
6349

    
6350
    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
6351
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
6352
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,
6353

    
6354
    [INDEX_op_loopnzw] = CC_Z,
6355
    [INDEX_op_loopnzl] = CC_Z,
6356
    [INDEX_op_loopzw] = CC_Z,
6357
    [INDEX_op_loopzl] = CC_Z,
6358

    
6359
    [INDEX_op_seto_T0_cc] = CC_O,
6360
    [INDEX_op_setb_T0_cc] = CC_C,
6361
    [INDEX_op_setz_T0_cc] = CC_Z,
6362
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
6363
    [INDEX_op_sets_T0_cc] = CC_S,
6364
    [INDEX_op_setp_T0_cc] = CC_P,
6365
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
6366
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,
6367

    
6368
    [INDEX_op_setb_T0_subb] = CC_C,
6369
    [INDEX_op_setb_T0_subw] = CC_C,
6370
    [INDEX_op_setb_T0_subl] = CC_C,
6371

    
6372
    [INDEX_op_setz_T0_subb] = CC_Z,
6373
    [INDEX_op_setz_T0_subw] = CC_Z,
6374
    [INDEX_op_setz_T0_subl] = CC_Z,
6375

    
6376
    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
6377
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
6378
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,
6379

    
6380
    [INDEX_op_sets_T0_subb] = CC_S,
6381
    [INDEX_op_sets_T0_subw] = CC_S,
6382
    [INDEX_op_sets_T0_subl] = CC_S,
6383

    
6384
    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
6385
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
6386
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,
6387

    
6388
    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
6389
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
6390
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,
6391

    
6392
    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
6393
    [INDEX_op_cmc] = CC_C,
6394
    [INDEX_op_salc] = CC_C,
6395

    
6396
    /* needed for correct flag optimisation before string ops */
6397
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
6398
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
6399
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
6400
    [INDEX_op_jz_ecxl] = CC_OSZAPC,
6401

    
6402
#ifdef TARGET_X86_64
6403
    [INDEX_op_jb_subq] = CC_C,
6404
    [INDEX_op_jz_subq] = CC_Z,
6405
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
6406
    [INDEX_op_js_subq] = CC_S,
6407
    [INDEX_op_jl_subq] = CC_O | CC_S,
6408
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,
6409

    
6410
    [INDEX_op_loopnzq] = CC_Z,
6411
    [INDEX_op_loopzq] = CC_Z,
6412

    
6413
    [INDEX_op_setb_T0_subq] = CC_C,
6414
    [INDEX_op_setz_T0_subq] = CC_Z,
6415
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
6416
    [INDEX_op_sets_T0_subq] = CC_S,
6417
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
6418
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,
6419

    
6420
    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
6421
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
6422
#endif
6423

    
6424
#define DEF_READF(SUFFIX)\
6425
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6426
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6427
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6428
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6429
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6430
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6431
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6432
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6433
\
6434
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6435
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6436
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
6437
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6438
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6439
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6440
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6441
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)
6442

    
6443
    DEF_READF( )
6444
    DEF_READF(_raw)
6445
#ifndef CONFIG_USER_ONLY
6446
    DEF_READF(_kernel)
6447
    DEF_READF(_user)
6448
#endif
6449
};
6450

    
6451
/* flags written by an operation */
6452
static uint16_t opc_write_flags[NB_OPS] = {
6453
    [INDEX_op_update2_cc] = CC_OSZAPC,
6454
    [INDEX_op_update1_cc] = CC_OSZAPC,
6455
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
6456
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
6457
    /* subtle: due to the incl/decl implementation, C is used */
6458
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
6459
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,
6460

    
6461
    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
6462
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
6463
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
6464
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
6465
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
6466
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
6467
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
6468
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
6469
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
6470
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
6471
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)
6472

    
6473
    /* sse */
6474
    [INDEX_op_ucomiss] = CC_OSZAPC,
6475
    [INDEX_op_ucomisd] = CC_OSZAPC,
6476
    [INDEX_op_comiss] = CC_OSZAPC,
6477
    [INDEX_op_comisd] = CC_OSZAPC,
6478

    
6479
    /* bcd */
6480
    [INDEX_op_aam] = CC_OSZAPC,
6481
    [INDEX_op_aad] = CC_OSZAPC,
6482
    [INDEX_op_aas] = CC_OSZAPC,
6483
    [INDEX_op_aaa] = CC_OSZAPC,
6484
    [INDEX_op_das] = CC_OSZAPC,
6485
    [INDEX_op_daa] = CC_OSZAPC,
6486

    
6487
    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
6488
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
6489
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
6490
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
6491
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
6492
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
6493
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
6494
    [INDEX_op_clc] = CC_C,
6495
    [INDEX_op_stc] = CC_C,
6496
    [INDEX_op_cmc] = CC_C,
6497

    
6498
    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
6499
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
6500
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
6501
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
6502
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
6503
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
6504
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
6505
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
6506
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
6507
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
6508
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
6509
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)
6510

    
6511
    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
6512
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
6513
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
6514
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
6515
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
6516
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)
6517

    
6518
    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
6519
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
6520
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
6521
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)
6522

    
6523
    [INDEX_op_cmpxchg8b] = CC_Z,
6524
    [INDEX_op_lar] = CC_Z,
6525
    [INDEX_op_lsl] = CC_Z,
6526
    [INDEX_op_verr] = CC_Z,
6527
    [INDEX_op_verw] = CC_Z,
6528
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
6529
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,
6530

    
6531
#define DEF_WRITEF(SUFFIX)\
6532
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6533
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6534
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6535
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6536
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6537
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6538
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6539
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6540
\
6541
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6542
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6543
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6544
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6545
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6546
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6547
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6548
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6549
\
6550
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6551
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6552
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6553
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6554
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6555
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6556
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6557
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6558
\
6559
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6560
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6561
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6562
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6563
\
6564
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6565
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6566
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6567
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6568
\
6569
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6570
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6571
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6572
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6573
\
6574
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6575
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6576
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6577
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6578
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6579
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6580
\
6581
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6582
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6583
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6584
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6585
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6586
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6587
\
6588
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6589
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6590
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6591
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)
6592

    
6593

    
6594
    DEF_WRITEF( )
6595
    DEF_WRITEF(_raw)
6596
#ifndef CONFIG_USER_ONLY
6597
    DEF_WRITEF(_kernel)
6598
    DEF_WRITEF(_user)
6599
#endif
6600
};
6601

    
6602
/* simpler form of an operation if no flags need to be generated */
6603
static uint16_t opc_simpler[NB_OPS] = {
6604
    [INDEX_op_update2_cc] = INDEX_op_nop,
6605
    [INDEX_op_update1_cc] = INDEX_op_nop,
6606
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
6607
#if 0
6608
    /* broken: CC_OP logic must be rewritten */
6609
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
6610
#endif
6611

    
6612
    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
6613
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
6614
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
6615
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)
6616

    
6617
    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
6618
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
6619
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
6620
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)
6621

    
6622
    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
6623
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
6624
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
6625
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)
6626

    
6627
#define DEF_SIMPLER(SUFFIX)\
6628
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
6629
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
6630
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
6631
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
6632
\
6633
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
6634
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
6635
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
6636
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)
6637

    
6638
    DEF_SIMPLER( )
6639
    DEF_SIMPLER(_raw)
6640
#ifndef CONFIG_USER_ONLY
6641
    DEF_SIMPLER(_kernel)
6642
    DEF_SIMPLER(_user)
6643
#endif
6644
};
6645

    
6646
void optimize_flags_init(void)
6647
{
6648
    int i;
6649
    /* put default values in arrays */
6650
    for(i = 0; i < NB_OPS; i++) {
6651
        if (opc_simpler[i] == 0)
6652
            opc_simpler[i] = i;
6653
    }
6654
}
6655

    
6656
/* CPU flags computation optimization: we move backward thru the
6657
   generated code to see which flags are needed. The operation is
6658
   modified if suitable */
6659
static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6660
{
6661
    uint16_t *opc_ptr;
6662
    int live_flags, write_flags, op;
6663

    
6664
    opc_ptr = opc_buf + opc_buf_len;
6665
    /* live_flags contains the flags needed by the next instructions
6666
       in the code. At the end of the block, we consider that all the
6667
       flags are live. */
6668
    live_flags = CC_OSZAPC;
6669
    while (opc_ptr > opc_buf) {
6670
        op = *--opc_ptr;
6671
        /* if none of the flags written by the instruction is used,
6672
           then we can try to find a simpler instruction */
6673
        write_flags = opc_write_flags[op];
6674
        if ((live_flags & write_flags) == 0) {
6675
            *opc_ptr = opc_simpler[op];
6676
        }
6677
        /* compute the live flags before the instruction */
6678
        live_flags &= ~write_flags;
6679
        live_flags |= opc_read_flags[op];
6680
    }
6681
}
6682

    
6683
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
6684
   basic block 'tb'. If search_pc is TRUE, also generate PC
6685
   information for each intermediate instruction. */
6686
static inline int gen_intermediate_code_internal(CPUState *env,
6687
                                                 TranslationBlock *tb,
6688
                                                 int search_pc)
6689
{
6690
    DisasContext dc1, *dc = &dc1;
6691
    target_ulong pc_ptr;
6692
    uint16_t *gen_opc_end;
6693
    int j, lj, cflags;
6694
    uint64_t flags;
6695
    target_ulong pc_start;
6696
    target_ulong cs_base;
6697

    
6698
    /* generate intermediate code */
6699
    pc_start = tb->pc;
6700
    cs_base = tb->cs_base;
6701
    flags = tb->flags;
6702
    cflags = tb->cflags;
6703

    
6704
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
6705
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
6706
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
6707
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
6708
    dc->f_st = 0;
6709
    dc->vm86 = (flags >> VM_SHIFT) & 1;
6710
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
6711
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
6712
    dc->tf = (flags >> TF_SHIFT) & 1;
6713
    dc->singlestep_enabled = env->singlestep_enabled;
6714
    dc->cc_op = CC_OP_DYNAMIC;
6715
    dc->cs_base = cs_base;
6716
    dc->tb = tb;
6717
    dc->popl_esp_hack = 0;
6718
    /* select memory access functions */
6719
    dc->mem_index = 0;
6720
    if (flags & HF_SOFTMMU_MASK) {
6721
        if (dc->cpl == 3)
6722
            dc->mem_index = 2 * 4;
6723
        else
6724
            dc->mem_index = 1 * 4;
6725
    }
6726
    dc->cpuid_features = env->cpuid_features;
6727
    dc->cpuid_ext_features = env->cpuid_ext_features;
6728
#ifdef TARGET_X86_64
6729
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
6730
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
6731
#endif
6732
    dc->flags = flags;
6733
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
6734
                    (flags & HF_INHIBIT_IRQ_MASK)
6735
#ifndef CONFIG_SOFTMMU
6736
                    || (flags & HF_SOFTMMU_MASK)
6737
#endif
6738
                    );
6739
#if 0
6740
    /* check addseg logic */
6741
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
6742
        printf("ERROR addseg\n");
6743
#endif
6744

    
6745
    gen_opc_ptr = gen_opc_buf;
6746
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
6747
    gen_opparam_ptr = gen_opparam_buf;
6748
    nb_gen_labels = 0;
6749

    
6750
    dc->is_jmp = DISAS_NEXT;
6751
    pc_ptr = pc_start;
6752
    lj = -1;
6753

    
6754
    for(;;) {
6755
        if (env->nb_breakpoints > 0) {
6756
            for(j = 0; j < env->nb_breakpoints; j++) {
6757
                if (env->breakpoints[j] == pc_ptr) {
6758
                    gen_debug(dc, pc_ptr - dc->cs_base);
6759
                    break;
6760
                }
6761
            }
6762
        }
6763
        if (search_pc) {
6764
            j = gen_opc_ptr - gen_opc_buf;
6765
            if (lj < j) {
6766
                lj++;
6767
                while (lj < j)
6768
                    gen_opc_instr_start[lj++] = 0;
6769
            }
6770
            gen_opc_pc[lj] = pc_ptr;
6771
            gen_opc_cc_op[lj] = dc->cc_op;
6772
            gen_opc_instr_start[lj] = 1;
6773
        }
6774
        pc_ptr = disas_insn(dc, pc_ptr);
6775
        /* stop translation if indicated */
6776
        if (dc->is_jmp)
6777
            break;
6778
        /* if single step mode, we generate only one instruction and
6779
           generate an exception */
6780
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
6781
           the flag and abort the translation to give the irqs a
6782
           change to be happen */
6783
        if (dc->tf || dc->singlestep_enabled ||
6784
            (flags & HF_INHIBIT_IRQ_MASK) ||
6785
            (cflags & CF_SINGLE_INSN)) {
6786
            gen_jmp_im(pc_ptr - dc->cs_base);
6787
            gen_eob(dc);
6788
            break;
6789
        }
6790
        /* if too long translation, stop generation too */
6791
        if (gen_opc_ptr >= gen_opc_end ||
6792
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
6793
            gen_jmp_im(pc_ptr - dc->cs_base);
6794
            gen_eob(dc);
6795
            break;
6796
        }
6797
    }
6798
    *gen_opc_ptr = INDEX_op_end;
6799
    /* we don't forget to fill the last values */
6800
    if (search_pc) {
6801
        j = gen_opc_ptr - gen_opc_buf;
6802
        lj++;
6803
        while (lj <= j)
6804
            gen_opc_instr_start[lj++] = 0;
6805
    }
6806

    
6807
#ifdef DEBUG_DISAS
6808
    if (loglevel & CPU_LOG_TB_CPU) {
6809
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
6810
    }
6811
    if (loglevel & CPU_LOG_TB_IN_ASM) {
6812
        int disas_flags;
6813
        fprintf(logfile, "----------------\n");
6814
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
6815
#ifdef TARGET_X86_64
6816
        if (dc->code64)
6817
            disas_flags = 2;
6818
        else
6819
#endif
6820
            disas_flags = !dc->code32;
6821
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
6822
        fprintf(logfile, "\n");
6823
        if (loglevel & CPU_LOG_TB_OP) {
6824
            fprintf(logfile, "OP:\n");
6825
            dump_ops(gen_opc_buf, gen_opparam_buf);
6826
            fprintf(logfile, "\n");
6827
        }
6828
    }
6829
#endif
6830

    
6831
    /* optimize flag computations */
6832
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);
6833

    
6834
#ifdef DEBUG_DISAS
6835
    if (loglevel & CPU_LOG_TB_OP_OPT) {
6836
        fprintf(logfile, "AFTER FLAGS OPT:\n");
6837
        dump_ops(gen_opc_buf, gen_opparam_buf);
6838
        fprintf(logfile, "\n");
6839
    }
6840
#endif
6841
    if (!search_pc)
6842
        tb->size = pc_ptr - pc_start;
6843
    return 0;
6844
}
6845

    
6846
/* Translate basic block 'tb' without per-op PC tracking. */
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 0);
}

/* Translate basic block 'tb' with per-op PC tracking enabled (used
   to restore the guest state after a host fault). */
int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 1);
}