target-i386/translate.c @ 3d7374c5
/*
 *  i386 translation
 *
 *  Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>
#include <signal.h>
#include <assert.h>

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"

/* XXX: move that elsewhere */
static uint16_t *gen_opc_ptr;
static uint32_t *gen_opparam_ptr;

#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10

#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

#ifdef TARGET_X86_64
static int x86_64_hregs;
#endif

#ifdef USE_DIRECT_JUMP
#define TBPARAM(x)
#else
#define TBPARAM(x) (long)(x)
#endif

typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;
    int aflag, dflag;
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b;
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if any of DS, ES or SS has a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    int flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;
    int cpuid_ext_features;
} DisasContext;
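
/* aflag and dflag hold the effective address and operand size encoded as
   0 = 16 bit, 1 = 32 bit and 2 = 64 bit (the latter only with
   TARGET_X86_64), which is why the helpers below test "aflag == 2" for
   64 bit addressing and plain non-zero aflag for 32 bit addressing. */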
105

    
106
static void gen_eob(DisasContext *s);
107
static void gen_jmp(DisasContext *s, target_ulong eip);
108
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
109

    
110
/* i386 arith/logic operations */
111
enum {
112
    OP_ADDL, 
113
    OP_ORL, 
114
    OP_ADCL, 
115
    OP_SBBL,
116
    OP_ANDL, 
117
    OP_SUBL, 
118
    OP_XORL, 
119
    OP_CMPL,
120
};
121

    
122
/* i386 shift ops */
123
enum {
124
    OP_ROL, 
125
    OP_ROR, 
126
    OP_RCL, 
127
    OP_RCR, 
128
    OP_SHL, 
129
    OP_SHR, 
130
    OP_SHL1, /* undocumented */
131
    OP_SAR = 7,
132
};
133

    
134
enum {
135
#define DEF(s, n, copy_size) INDEX_op_ ## s,
136
#include "opc.h"
137
#undef DEF
138
    NB_OPS,
139
};
140

    
141
#include "gen-op.h"
142

    
143
/* operand size */
144
enum {
145
    OT_BYTE = 0,
146
    OT_WORD,
147
    OT_LONG, 
148
    OT_QUAD,
149
};
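
/* OT_BYTE..OT_QUAD correspond to 1, 2, 4 and 8 byte operands.  The value is
   used directly as the first index into the generated-op tables below and,
   added to mem_index (which steps in multiples of four), to select the
   raw/kernel/user variant of the memory access ops. */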
150

    
151
enum {
152
    /* I386 int registers */
153
    OR_EAX,   /* MUST be even numbered */
154
    OR_ECX,
155
    OR_EDX,
156
    OR_EBX,
157
    OR_ESP,
158
    OR_EBP,
159
    OR_ESI,
160
    OR_EDI,
161

    
162
    OR_TMP0 = 16,    /* temporary operand register */
163
    OR_TMP1,
164
    OR_A0, /* temporary register used when doing address evaluation */
165
};
166

    
167
#ifdef TARGET_X86_64
168

    
169
#define NB_OP_SIZES 4
170

    
171
#define DEF_REGS(prefix, suffix) \
172
  prefix ## EAX ## suffix,\
173
  prefix ## ECX ## suffix,\
174
  prefix ## EDX ## suffix,\
175
  prefix ## EBX ## suffix,\
176
  prefix ## ESP ## suffix,\
177
  prefix ## EBP ## suffix,\
178
  prefix ## ESI ## suffix,\
179
  prefix ## EDI ## suffix,\
180
  prefix ## R8 ## suffix,\
181
  prefix ## R9 ## suffix,\
182
  prefix ## R10 ## suffix,\
183
  prefix ## R11 ## suffix,\
184
  prefix ## R12 ## suffix,\
185
  prefix ## R13 ## suffix,\
186
  prefix ## R14 ## suffix,\
187
  prefix ## R15 ## suffix,
188

    
189
#define DEF_BREGS(prefixb, prefixh, suffix)             \
190
                                                        \
191
static void prefixb ## ESP ## suffix ## _wrapper(void)  \
192
{                                                       \
193
    if (x86_64_hregs)                                 \
194
        prefixb ## ESP ## suffix ();                    \
195
    else                                                \
196
        prefixh ## EAX ## suffix ();                    \
197
}                                                       \
198
                                                        \
199
static void prefixb ## EBP ## suffix ## _wrapper(void)  \
200
{                                                       \
201
    if (x86_64_hregs)                                 \
202
        prefixb ## EBP ## suffix ();                    \
203
    else                                                \
204
        prefixh ## ECX ## suffix ();                    \
205
}                                                       \
206
                                                        \
207
static void prefixb ## ESI ## suffix ## _wrapper(void)  \
208
{                                                       \
209
    if (x86_64_hregs)                                 \
210
        prefixb ## ESI ## suffix ();                    \
211
    else                                                \
212
        prefixh ## EDX ## suffix ();                    \
213
}                                                       \
214
                                                        \
215
static void prefixb ## EDI ## suffix ## _wrapper(void)  \
216
{                                                       \
217
    if (x86_64_hregs)                                 \
218
        prefixb ## EDI ## suffix ();                    \
219
    else                                                \
220
        prefixh ## EBX ## suffix ();                    \
221
}
222

    
223
DEF_BREGS(gen_op_movb_, gen_op_movh_, _T0)
224
DEF_BREGS(gen_op_movb_, gen_op_movh_, _T1)
225
DEF_BREGS(gen_op_movl_T0_, gen_op_movh_T0_, )
226
DEF_BREGS(gen_op_movl_T1_, gen_op_movh_T1_, )
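
/* The *_wrapper functions generated above handle an x86_64 encoding quirk:
   without any REX prefix, byte-register numbers 4..7 select AH/CH/DH/BH,
   whereas with a REX prefix they select SPL/BPL/SIL/DIL.  x86_64_hregs
   records whether a REX prefix was seen for the current instruction, and
   the wrappers pick the right variant at run time. */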
227

    
228
#else /* !TARGET_X86_64 */
229

    
230
#define NB_OP_SIZES 3
231

    
232
#define DEF_REGS(prefix, suffix) \
233
  prefix ## EAX ## suffix,\
234
  prefix ## ECX ## suffix,\
235
  prefix ## EDX ## suffix,\
236
  prefix ## EBX ## suffix,\
237
  prefix ## ESP ## suffix,\
238
  prefix ## EBP ## suffix,\
239
  prefix ## ESI ## suffix,\
240
  prefix ## EDI ## suffix,
241

    
242
#endif /* !TARGET_X86_64 */
243

    
244
static GenOpFunc *gen_op_mov_reg_T0[NB_OP_SIZES][CPU_NB_REGS] = {
245
    [OT_BYTE] = {
246
        gen_op_movb_EAX_T0,
247
        gen_op_movb_ECX_T0,
248
        gen_op_movb_EDX_T0,
249
        gen_op_movb_EBX_T0,
250
#ifdef TARGET_X86_64
251
        gen_op_movb_ESP_T0_wrapper,
252
        gen_op_movb_EBP_T0_wrapper,
253
        gen_op_movb_ESI_T0_wrapper,
254
        gen_op_movb_EDI_T0_wrapper,
255
        gen_op_movb_R8_T0,
256
        gen_op_movb_R9_T0,
257
        gen_op_movb_R10_T0,
258
        gen_op_movb_R11_T0,
259
        gen_op_movb_R12_T0,
260
        gen_op_movb_R13_T0,
261
        gen_op_movb_R14_T0,
262
        gen_op_movb_R15_T0,
263
#else
264
        gen_op_movh_EAX_T0,
265
        gen_op_movh_ECX_T0,
266
        gen_op_movh_EDX_T0,
267
        gen_op_movh_EBX_T0,
268
#endif
269
    },
270
    [OT_WORD] = {
271
        DEF_REGS(gen_op_movw_, _T0)
272
    },
273
    [OT_LONG] = {
274
        DEF_REGS(gen_op_movl_, _T0)
275
    },
276
#ifdef TARGET_X86_64
277
    [OT_QUAD] = {
278
        DEF_REGS(gen_op_movq_, _T0)
279
    },
280
#endif
281
};
282

    
283
static GenOpFunc *gen_op_mov_reg_T1[NB_OP_SIZES][CPU_NB_REGS] = {
284
    [OT_BYTE] = {
285
        gen_op_movb_EAX_T1,
286
        gen_op_movb_ECX_T1,
287
        gen_op_movb_EDX_T1,
288
        gen_op_movb_EBX_T1,
289
#ifdef TARGET_X86_64
290
        gen_op_movb_ESP_T1_wrapper,
291
        gen_op_movb_EBP_T1_wrapper,
292
        gen_op_movb_ESI_T1_wrapper,
293
        gen_op_movb_EDI_T1_wrapper,
294
        gen_op_movb_R8_T1,
295
        gen_op_movb_R9_T1,
296
        gen_op_movb_R10_T1,
297
        gen_op_movb_R11_T1,
298
        gen_op_movb_R12_T1,
299
        gen_op_movb_R13_T1,
300
        gen_op_movb_R14_T1,
301
        gen_op_movb_R15_T1,
302
#else
303
        gen_op_movh_EAX_T1,
304
        gen_op_movh_ECX_T1,
305
        gen_op_movh_EDX_T1,
306
        gen_op_movh_EBX_T1,
307
#endif
308
    },
309
    [OT_WORD] = {
310
        DEF_REGS(gen_op_movw_, _T1)
311
    },
312
    [OT_LONG] = {
313
        DEF_REGS(gen_op_movl_, _T1)
314
    },
315
#ifdef TARGET_X86_64
316
    [OT_QUAD] = {
317
        DEF_REGS(gen_op_movq_, _T1)
318
    },
319
#endif
320
};
321

    
322
static GenOpFunc *gen_op_mov_reg_A0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
323
    [0] = {
324
        DEF_REGS(gen_op_movw_, _A0)
325
    },
326
    [1] = {
327
        DEF_REGS(gen_op_movl_, _A0)
328
    },
329
#ifdef TARGET_X86_64
330
    [2] = {
331
        DEF_REGS(gen_op_movq_, _A0)
332
    },
333
#endif
334
};
335

    
336
static GenOpFunc *gen_op_mov_TN_reg[NB_OP_SIZES][2][CPU_NB_REGS] = 
337
{
338
    [OT_BYTE] = {
339
        {
340
            gen_op_movl_T0_EAX,
341
            gen_op_movl_T0_ECX,
342
            gen_op_movl_T0_EDX,
343
            gen_op_movl_T0_EBX,
344
#ifdef TARGET_X86_64
345
            gen_op_movl_T0_ESP_wrapper,
346
            gen_op_movl_T0_EBP_wrapper,
347
            gen_op_movl_T0_ESI_wrapper,
348
            gen_op_movl_T0_EDI_wrapper,
349
            gen_op_movl_T0_R8,
350
            gen_op_movl_T0_R9,
351
            gen_op_movl_T0_R10,
352
            gen_op_movl_T0_R11,
353
            gen_op_movl_T0_R12,
354
            gen_op_movl_T0_R13,
355
            gen_op_movl_T0_R14,
356
            gen_op_movl_T0_R15,
357
#else
358
            gen_op_movh_T0_EAX,
359
            gen_op_movh_T0_ECX,
360
            gen_op_movh_T0_EDX,
361
            gen_op_movh_T0_EBX,
362
#endif
363
        },
364
        {
365
            gen_op_movl_T1_EAX,
366
            gen_op_movl_T1_ECX,
367
            gen_op_movl_T1_EDX,
368
            gen_op_movl_T1_EBX,
369
#ifdef TARGET_X86_64
370
            gen_op_movl_T1_ESP_wrapper,
371
            gen_op_movl_T1_EBP_wrapper,
372
            gen_op_movl_T1_ESI_wrapper,
373
            gen_op_movl_T1_EDI_wrapper,
374
            gen_op_movl_T1_R8,
375
            gen_op_movl_T1_R9,
376
            gen_op_movl_T1_R10,
377
            gen_op_movl_T1_R11,
378
            gen_op_movl_T1_R12,
379
            gen_op_movl_T1_R13,
380
            gen_op_movl_T1_R14,
381
            gen_op_movl_T1_R15,
382
#else
383
            gen_op_movh_T1_EAX,
384
            gen_op_movh_T1_ECX,
385
            gen_op_movh_T1_EDX,
386
            gen_op_movh_T1_EBX,
387
#endif
388
        },
389
    },
390
    [OT_WORD] = {
391
        {
392
            DEF_REGS(gen_op_movl_T0_, )
393
        },
394
        {
395
            DEF_REGS(gen_op_movl_T1_, )
396
        },
397
    },
398
    [OT_LONG] = {
399
        {
400
            DEF_REGS(gen_op_movl_T0_, )
401
        },
402
        {
403
            DEF_REGS(gen_op_movl_T1_, )
404
        },
405
    },
406
#ifdef TARGET_X86_64
407
    [OT_QUAD] = {
408
        {
409
            DEF_REGS(gen_op_movl_T0_, )
410
        },
411
        {
412
            DEF_REGS(gen_op_movl_T1_, )
413
        },
414
    },
415
#endif
416
};
417

    
418
static GenOpFunc *gen_op_movl_A0_reg[CPU_NB_REGS] = {
419
    DEF_REGS(gen_op_movl_A0_, )
420
};
421

    
422
static GenOpFunc *gen_op_addl_A0_reg_sN[4][CPU_NB_REGS] = {
423
    [0] = {
424
        DEF_REGS(gen_op_addl_A0_, )
425
    },
426
    [1] = {
427
        DEF_REGS(gen_op_addl_A0_, _s1)
428
    },
429
    [2] = {
430
        DEF_REGS(gen_op_addl_A0_, _s2)
431
    },
432
    [3] = {
433
        DEF_REGS(gen_op_addl_A0_, _s3)
434
    },
435
};
436

    
437
#ifdef TARGET_X86_64
438
static GenOpFunc *gen_op_movq_A0_reg[CPU_NB_REGS] = {
439
    DEF_REGS(gen_op_movq_A0_, )
440
};
441

    
442
static GenOpFunc *gen_op_addq_A0_reg_sN[4][CPU_NB_REGS] = {
443
    [0] = {
444
        DEF_REGS(gen_op_addq_A0_, )
445
    },
446
    [1] = {
447
        DEF_REGS(gen_op_addq_A0_, _s1)
448
    },
449
    [2] = {
450
        DEF_REGS(gen_op_addq_A0_, _s2)
451
    },
452
    [3] = {
453
        DEF_REGS(gen_op_addq_A0_, _s3)
454
    },
455
};
456
#endif
457

    
458
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
459
    [0] = {
460
        DEF_REGS(gen_op_cmovw_, _T1_T0)
461
    },
462
    [1] = {
463
        DEF_REGS(gen_op_cmovl_, _T1_T0)
464
    },
465
#ifdef TARGET_X86_64
466
    [2] = {
467
        DEF_REGS(gen_op_cmovq_, _T1_T0)
468
    },
469
#endif
470
};
471

    
472
static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
473
    NULL,
474
    gen_op_orl_T0_T1,
475
    NULL,
476
    NULL,
477
    gen_op_andl_T0_T1,
478
    NULL,
479
    gen_op_xorl_T0_T1,
480
    NULL,
481
};
482

    
483
#define DEF_ARITHC(SUFFIX)\
484
    {\
485
        gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
486
        gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
487
    },\
488
    {\
489
        gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
490
        gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
491
    },\
492
    {\
493
        gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
494
        gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
495
    },\
496
    {\
497
        X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
498
        X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
499
    },
500

    
501
static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
502
    DEF_ARITHC( )
503
};
504

    
505
static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
506
    DEF_ARITHC(_raw)
507
#ifndef CONFIG_USER_ONLY
508
    DEF_ARITHC(_kernel)
509
    DEF_ARITHC(_user)
510
#endif
511
};
512

    
513
static const int cc_op_arithb[8] = {
514
    CC_OP_ADDB,
515
    CC_OP_LOGICB,
516
    CC_OP_ADDB,
517
    CC_OP_SUBB,
518
    CC_OP_LOGICB,
519
    CC_OP_SUBB,
520
    CC_OP_LOGICB,
521
    CC_OP_SUBB,
522
};
523

    
524
#define DEF_CMPXCHG(SUFFIX)\
525
    gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
526
    gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
527
    gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
528
    X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
529

    
530
static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
531
    DEF_CMPXCHG( )
532
};
533

    
534
static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
535
    DEF_CMPXCHG(_raw)
536
#ifndef CONFIG_USER_ONLY
537
    DEF_CMPXCHG(_kernel)
538
    DEF_CMPXCHG(_user)
539
#endif
540
};
541

    
542
#define DEF_SHIFT(SUFFIX)\
543
    {\
544
        gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
545
        gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
546
        gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
547
        gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
548
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
549
        gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
550
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
551
        gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
552
    },\
553
    {\
554
        gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
555
        gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
556
        gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
557
        gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
558
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
559
        gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
560
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
561
        gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
562
    },\
563
    {\
564
        gen_op_roll ## SUFFIX ## _T0_T1_cc,\
565
        gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
566
        gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
567
        gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
568
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
569
        gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
570
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
571
        gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
572
    },\
573
    {\
574
        X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
575
        X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
576
        X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
577
        X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
578
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
579
        X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
580
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
581
        X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
582
    },
583

    
584
static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
585
    DEF_SHIFT( )
586
};
587

    
588
static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
589
    DEF_SHIFT(_raw)
590
#ifndef CONFIG_USER_ONLY
591
    DEF_SHIFT(_kernel)
592
    DEF_SHIFT(_user)
593
#endif
594
};
595

    
596
#define DEF_SHIFTD(SUFFIX, op)\
597
    {\
598
        NULL,\
599
        NULL,\
600
    },\
601
    {\
602
        gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
603
        gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
604
     },\
605
    {\
606
        gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
607
        gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
608
    },\
609
    {\
610
X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
611
           gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
612
    },
613

    
614
static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
615
    DEF_SHIFTD(, im)
616
};
617

    
618
static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
619
    DEF_SHIFTD(, ECX)
620
};
621

    
622
static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
623
    DEF_SHIFTD(_raw, im)
624
#ifndef CONFIG_USER_ONLY
625
    DEF_SHIFTD(_kernel, im)
626
    DEF_SHIFTD(_user, im)
627
#endif
628
};
629

    
630
static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
631
    DEF_SHIFTD(_raw, ECX)
632
#ifndef CONFIG_USER_ONLY
633
    DEF_SHIFTD(_kernel, ECX)
634
    DEF_SHIFTD(_user, ECX)
635
#endif
636
};
637

    
638
static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
639
    [0] = {
640
        gen_op_btw_T0_T1_cc,
641
        gen_op_btsw_T0_T1_cc,
642
        gen_op_btrw_T0_T1_cc,
643
        gen_op_btcw_T0_T1_cc,
644
    },
645
    [1] = {
646
        gen_op_btl_T0_T1_cc,
647
        gen_op_btsl_T0_T1_cc,
648
        gen_op_btrl_T0_T1_cc,
649
        gen_op_btcl_T0_T1_cc,
650
    },
651
#ifdef TARGET_X86_64
652
    [2] = {
653
        gen_op_btq_T0_T1_cc,
654
        gen_op_btsq_T0_T1_cc,
655
        gen_op_btrq_T0_T1_cc,
656
        gen_op_btcq_T0_T1_cc,
657
    },
658
#endif
659
};
660

    
661
static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
662
    gen_op_add_bitw_A0_T1,
663
    gen_op_add_bitl_A0_T1,
664
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
665
};
666

    
667
static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
668
    [0] = {
669
        gen_op_bsfw_T0_cc,
670
        gen_op_bsrw_T0_cc,
671
    },
672
    [1] = {
673
        gen_op_bsfl_T0_cc,
674
        gen_op_bsrl_T0_cc,
675
    },
676
#ifdef TARGET_X86_64
677
    [2] = {
678
        gen_op_bsfq_T0_cc,
679
        gen_op_bsrq_T0_cc,
680
    },
681
#endif
682
};
683

    
684
static GenOpFunc *gen_op_lds_T0_A0[3 * 4] = {
685
    gen_op_ldsb_raw_T0_A0,
686
    gen_op_ldsw_raw_T0_A0,
687
    X86_64_ONLY(gen_op_ldsl_raw_T0_A0),
688
    NULL,
689
#ifndef CONFIG_USER_ONLY
690
    gen_op_ldsb_kernel_T0_A0,
691
    gen_op_ldsw_kernel_T0_A0,
692
    X86_64_ONLY(gen_op_ldsl_kernel_T0_A0),
693
    NULL,
694

    
695
    gen_op_ldsb_user_T0_A0,
696
    gen_op_ldsw_user_T0_A0,
697
    X86_64_ONLY(gen_op_ldsl_user_T0_A0),
698
    NULL,
699
#endif
700
};
701

    
702
static GenOpFunc *gen_op_ldu_T0_A0[3 * 4] = {
703
    gen_op_ldub_raw_T0_A0,
704
    gen_op_lduw_raw_T0_A0,
705
    NULL,
706
    NULL,
707

    
708
#ifndef CONFIG_USER_ONLY
709
    gen_op_ldub_kernel_T0_A0,
710
    gen_op_lduw_kernel_T0_A0,
711
    NULL,
712
    NULL,
713

    
714
    gen_op_ldub_user_T0_A0,
715
    gen_op_lduw_user_T0_A0,
716
    NULL,
717
    NULL,
718
#endif
719
};
720

    
721
/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
722
static GenOpFunc *gen_op_ld_T0_A0[3 * 4] = {
723
    gen_op_ldub_raw_T0_A0,
724
    gen_op_lduw_raw_T0_A0,
725
    gen_op_ldl_raw_T0_A0,
726
    X86_64_ONLY(gen_op_ldq_raw_T0_A0),
727

    
728
#ifndef CONFIG_USER_ONLY
729
    gen_op_ldub_kernel_T0_A0,
730
    gen_op_lduw_kernel_T0_A0,
731
    gen_op_ldl_kernel_T0_A0,
732
    X86_64_ONLY(gen_op_ldq_kernel_T0_A0),
733

    
734
    gen_op_ldub_user_T0_A0,
735
    gen_op_lduw_user_T0_A0,
736
    gen_op_ldl_user_T0_A0,
737
    X86_64_ONLY(gen_op_ldq_user_T0_A0),
738
#endif
739
};
740

    
741
static GenOpFunc *gen_op_ld_T1_A0[3 * 4] = {
742
    gen_op_ldub_raw_T1_A0,
743
    gen_op_lduw_raw_T1_A0,
744
    gen_op_ldl_raw_T1_A0,
745
    X86_64_ONLY(gen_op_ldq_raw_T1_A0),
746

    
747
#ifndef CONFIG_USER_ONLY
748
    gen_op_ldub_kernel_T1_A0,
749
    gen_op_lduw_kernel_T1_A0,
750
    gen_op_ldl_kernel_T1_A0,
751
    X86_64_ONLY(gen_op_ldq_kernel_T1_A0),
752

    
753
    gen_op_ldub_user_T1_A0,
754
    gen_op_lduw_user_T1_A0,
755
    gen_op_ldl_user_T1_A0,
756
    X86_64_ONLY(gen_op_ldq_user_T1_A0),
757
#endif
758
};
759

    
760
static GenOpFunc *gen_op_st_T0_A0[3 * 4] = {
761
    gen_op_stb_raw_T0_A0,
762
    gen_op_stw_raw_T0_A0,
763
    gen_op_stl_raw_T0_A0,
764
    X86_64_ONLY(gen_op_stq_raw_T0_A0),
765

    
766
#ifndef CONFIG_USER_ONLY
767
    gen_op_stb_kernel_T0_A0,
768
    gen_op_stw_kernel_T0_A0,
769
    gen_op_stl_kernel_T0_A0,
770
    X86_64_ONLY(gen_op_stq_kernel_T0_A0),
771

    
772
    gen_op_stb_user_T0_A0,
773
    gen_op_stw_user_T0_A0,
774
    gen_op_stl_user_T0_A0,
775
    X86_64_ONLY(gen_op_stq_user_T0_A0),
776
#endif
777
};
778

    
779
static GenOpFunc *gen_op_st_T1_A0[3 * 4] = {
780
    NULL,
781
    gen_op_stw_raw_T1_A0,
782
    gen_op_stl_raw_T1_A0,
783
    X86_64_ONLY(gen_op_stq_raw_T1_A0),
784

    
785
#ifndef CONFIG_USER_ONLY
786
    NULL,
787
    gen_op_stw_kernel_T1_A0,
788
    gen_op_stl_kernel_T1_A0,
789
    X86_64_ONLY(gen_op_stq_kernel_T1_A0),
790

    
791
    NULL,
792
    gen_op_stw_user_T1_A0,
793
    gen_op_stl_user_T1_A0,
794
    X86_64_ONLY(gen_op_stq_user_T1_A0),
795
#endif
796
};
797

    
798
static inline void gen_jmp_im(target_ulong pc)
799
{
800
#ifdef TARGET_X86_64
801
    if (pc == (uint32_t)pc) {
802
        gen_op_movl_eip_im(pc);
803
    } else if (pc == (int32_t)pc) {
804
        gen_op_movq_eip_im(pc);
805
    } else {
806
        gen_op_movq_eip_im64(pc >> 32, pc);
807
    }
808
#else
809
    gen_op_movl_eip_im(pc);
810
#endif
811
}
812

    
813
static inline void gen_string_movl_A0_ESI(DisasContext *s)
814
{
815
    int override;
816

    
817
    override = s->override;
818
#ifdef TARGET_X86_64
819
    if (s->aflag == 2) {
820
        if (override >= 0) {
821
            gen_op_movq_A0_seg(offsetof(CPUX86State,segs[override].base));
822
            gen_op_addq_A0_reg_sN[0][R_ESI]();
823
        } else {
824
            gen_op_movq_A0_reg[R_ESI]();
825
        }
826
    } else
827
#endif
828
    if (s->aflag) {
829
        /* 32 bit address */
830
        if (s->addseg && override < 0)
831
            override = R_DS;
832
        if (override >= 0) {
833
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
834
            gen_op_addl_A0_reg_sN[0][R_ESI]();
835
        } else {
836
            gen_op_movl_A0_reg[R_ESI]();
837
        }
838
    } else {
839
        /* 16 bit address, always override */
840
        if (override < 0)
841
            override = R_DS;
842
        gen_op_movl_A0_reg[R_ESI]();
843
        gen_op_andl_A0_ffff();
844
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
845
    }
846
}
847

    
848
static inline void gen_string_movl_A0_EDI(DisasContext *s)
849
{
850
#ifdef TARGET_X86_64
851
    if (s->aflag == 2) {
852
        gen_op_movq_A0_reg[R_EDI]();
853
    } else
854
#endif
855
    if (s->aflag) {
856
        if (s->addseg) {
857
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
858
            gen_op_addl_A0_reg_sN[0][R_EDI]();
859
        } else {
860
            gen_op_movl_A0_reg[R_EDI]();
861
        }
862
    } else {
863
        gen_op_movl_A0_reg[R_EDI]();
864
        gen_op_andl_A0_ffff();
865
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
866
    }
867
}
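
/* Note: the destination of string instructions is architecturally fixed to
   ES:EDI; segment override prefixes do not apply to it, which is why this
   helper, unlike gen_string_movl_A0_ESI() above, never looks at
   s->override. */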
868

    
869
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
870
    gen_op_movl_T0_Dshiftb,
871
    gen_op_movl_T0_Dshiftw,
872
    gen_op_movl_T0_Dshiftl,
873
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
874
};
875

    
876
static GenOpFunc1 *gen_op_jnz_ecx[3] = {
877
    gen_op_jnz_ecxw,
878
    gen_op_jnz_ecxl,
879
    X86_64_ONLY(gen_op_jnz_ecxq),
880
};
881
    
882
static GenOpFunc1 *gen_op_jz_ecx[3] = {
883
    gen_op_jz_ecxw,
884
    gen_op_jz_ecxl,
885
    X86_64_ONLY(gen_op_jz_ecxq),
886
};
887

    
888
static GenOpFunc *gen_op_dec_ECX[3] = {
889
    gen_op_decw_ECX,
890
    gen_op_decl_ECX,
891
    X86_64_ONLY(gen_op_decq_ECX),
892
};
893

    
894
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
895
    {
896
        gen_op_jnz_subb,
897
        gen_op_jnz_subw,
898
        gen_op_jnz_subl,
899
        X86_64_ONLY(gen_op_jnz_subq),
900
    },
901
    {
902
        gen_op_jz_subb,
903
        gen_op_jz_subw,
904
        gen_op_jz_subl,
905
        X86_64_ONLY(gen_op_jz_subq),
906
    },
907
};
908

    
909
static GenOpFunc *gen_op_in_DX_T0[3] = {
910
    gen_op_inb_DX_T0,
911
    gen_op_inw_DX_T0,
912
    gen_op_inl_DX_T0,
913
};
914

    
915
static GenOpFunc *gen_op_out_DX_T0[3] = {
916
    gen_op_outb_DX_T0,
917
    gen_op_outw_DX_T0,
918
    gen_op_outl_DX_T0,
919
};
920

    
921
static GenOpFunc *gen_op_in[3] = {
922
    gen_op_inb_T0_T1,
923
    gen_op_inw_T0_T1,
924
    gen_op_inl_T0_T1,
925
};
926

    
927
static GenOpFunc *gen_op_out[3] = {
928
    gen_op_outb_T0_T1,
929
    gen_op_outw_T0_T1,
930
    gen_op_outl_T0_T1,
931
};
932

    
933
static GenOpFunc *gen_check_io_T0[3] = {
934
    gen_op_check_iob_T0,
935
    gen_op_check_iow_T0,
936
    gen_op_check_iol_T0,
937
};
938

    
939
static GenOpFunc *gen_check_io_DX[3] = {
940
    gen_op_check_iob_DX,
941
    gen_op_check_iow_DX,
942
    gen_op_check_iol_DX,
943
};
944

    
945
static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
946
{
947
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
948
        if (s->cc_op != CC_OP_DYNAMIC)
949
            gen_op_set_cc_op(s->cc_op);
950
        gen_jmp_im(cur_eip);
951
        if (use_dx)
952
            gen_check_io_DX[ot]();
953
        else
954
            gen_check_io_T0[ot]();
955
    }
956
}
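
/* Port I/O only needs a permission check when CPL > IOPL in protected mode,
   or in vm86 mode; in that case the check_io helpers consult the TSS I/O
   permission bitmap and raise an exception on failure, so cc_op and EIP are
   synchronised first to keep the fault precise. */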
957

    
958
static inline void gen_movs(DisasContext *s, int ot)
959
{
960
    gen_string_movl_A0_ESI(s);
961
    gen_op_ld_T0_A0[ot + s->mem_index]();
962
    gen_string_movl_A0_EDI(s);
963
    gen_op_st_T0_A0[ot + s->mem_index]();
964
    gen_op_movl_T0_Dshift[ot]();
965
#ifdef TARGET_X86_64
966
    if (s->aflag == 2) {
967
        gen_op_addq_ESI_T0();
968
        gen_op_addq_EDI_T0();
969
    } else 
970
#endif
971
    if (s->aflag) {
972
        gen_op_addl_ESI_T0();
973
        gen_op_addl_EDI_T0();
974
    } else {
975
        gen_op_addw_ESI_T0();
976
        gen_op_addw_EDI_T0();
977
    }
978
}
979

    
980
static inline void gen_update_cc_op(DisasContext *s)
981
{
982
    if (s->cc_op != CC_OP_DYNAMIC) {
983
        gen_op_set_cc_op(s->cc_op);
984
        s->cc_op = CC_OP_DYNAMIC;
985
    }
986
}
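
/* Lazy condition codes: cc_op records which operation (and operand size)
   last set the flags, so the EFLAGS bits are only computed when an
   instruction actually consumes them.  CC_OP_DYNAMIC means the value has
   already been written back into the CPU state and is no longer known at
   translation time. */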
987

    
988
/* XXX: does not work with gdbstub "ice" single step - not a
989
   serious problem */
990
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
991
{
992
    int l1, l2;
993

    
994
    l1 = gen_new_label();
995
    l2 = gen_new_label();
996
    gen_op_jnz_ecx[s->aflag](l1);
997
    gen_set_label(l2);
998
    gen_jmp_tb(s, next_eip, 1);
999
    gen_set_label(l1);
1000
    return l2;
1001
}
1002

    
1003
static inline void gen_stos(DisasContext *s, int ot)
1004
{
1005
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
1006
    gen_string_movl_A0_EDI(s);
1007
    gen_op_st_T0_A0[ot + s->mem_index]();
1008
    gen_op_movl_T0_Dshift[ot]();
1009
#ifdef TARGET_X86_64
1010
    if (s->aflag == 2) {
1011
        gen_op_addq_EDI_T0();
1012
    } else 
1013
#endif
1014
    if (s->aflag) {
1015
        gen_op_addl_EDI_T0();
1016
    } else {
1017
        gen_op_addw_EDI_T0();
1018
    }
1019
}
1020

    
1021
static inline void gen_lods(DisasContext *s, int ot)
1022
{
1023
    gen_string_movl_A0_ESI(s);
1024
    gen_op_ld_T0_A0[ot + s->mem_index]();
1025
    gen_op_mov_reg_T0[ot][R_EAX]();
1026
    gen_op_movl_T0_Dshift[ot]();
1027
#ifdef TARGET_X86_64
1028
    if (s->aflag == 2) {
1029
        gen_op_addq_ESI_T0();
1030
    } else 
1031
#endif
1032
    if (s->aflag) {
1033
        gen_op_addl_ESI_T0();
1034
    } else {
1035
        gen_op_addw_ESI_T0();
1036
    }
1037
}
1038

    
1039
static inline void gen_scas(DisasContext *s, int ot)
1040
{
1041
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
1042
    gen_string_movl_A0_EDI(s);
1043
    gen_op_ld_T1_A0[ot + s->mem_index]();
1044
    gen_op_cmpl_T0_T1_cc();
1045
    gen_op_movl_T0_Dshift[ot]();
1046
#ifdef TARGET_X86_64
1047
    if (s->aflag == 2) {
1048
        gen_op_addq_EDI_T0();
1049
    } else 
1050
#endif
1051
    if (s->aflag) {
1052
        gen_op_addl_EDI_T0();
1053
    } else {
1054
        gen_op_addw_EDI_T0();
1055
    }
1056
}
1057

    
1058
static inline void gen_cmps(DisasContext *s, int ot)
1059
{
1060
    gen_string_movl_A0_ESI(s);
1061
    gen_op_ld_T0_A0[ot + s->mem_index]();
1062
    gen_string_movl_A0_EDI(s);
1063
    gen_op_ld_T1_A0[ot + s->mem_index]();
1064
    gen_op_cmpl_T0_T1_cc();
1065
    gen_op_movl_T0_Dshift[ot]();
1066
#ifdef TARGET_X86_64
1067
    if (s->aflag == 2) {
1068
        gen_op_addq_ESI_T0();
1069
        gen_op_addq_EDI_T0();
1070
    } else 
1071
#endif
1072
    if (s->aflag) {
1073
        gen_op_addl_ESI_T0();
1074
        gen_op_addl_EDI_T0();
1075
    } else {
1076
        gen_op_addw_ESI_T0();
1077
        gen_op_addw_EDI_T0();
1078
    }
1079
}
1080

    
1081
static inline void gen_ins(DisasContext *s, int ot)
1082
{
1083
    gen_string_movl_A0_EDI(s);
1084
    gen_op_movl_T0_0();
1085
    gen_op_st_T0_A0[ot + s->mem_index]();
1086
    gen_op_in_DX_T0[ot]();
1087
    gen_op_st_T0_A0[ot + s->mem_index]();
1088
    gen_op_movl_T0_Dshift[ot]();
1089
#ifdef TARGET_X86_64
1090
    if (s->aflag == 2) {
1091
        gen_op_addq_EDI_T0();
1092
    } else 
1093
#endif
1094
    if (s->aflag) {
1095
        gen_op_addl_EDI_T0();
1096
    } else {
1097
        gen_op_addw_EDI_T0();
1098
    }
1099
}
1100

    
1101
static inline void gen_outs(DisasContext *s, int ot)
1102
{
1103
    gen_string_movl_A0_ESI(s);
1104
    gen_op_ld_T0_A0[ot + s->mem_index]();
1105
    gen_op_out_DX_T0[ot]();
1106
    gen_op_movl_T0_Dshift[ot]();
1107
#ifdef TARGET_X86_64
1108
    if (s->aflag == 2) {
1109
        gen_op_addq_ESI_T0();
1110
    } else 
1111
#endif
1112
    if (s->aflag) {
1113
        gen_op_addl_ESI_T0();
1114
    } else {
1115
        gen_op_addw_ESI_T0();
1116
    }
1117
}
1118

    
1119
/* same method as Valgrind: we generate jumps to the current or the next
   instruction */
1121
#define GEN_REPZ(op)                                                          \
1122
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
1123
                                 target_ulong cur_eip, target_ulong next_eip) \
1124
{                                                                             \
1125
    int l2;\
1126
    gen_update_cc_op(s);                                                      \
1127
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
1128
    gen_ ## op(s, ot);                                                        \
1129
    gen_op_dec_ECX[s->aflag]();                                               \
1130
    /* a loop would cause two single step exceptions if ECX = 1               \
1131
       before rep string_insn */                                              \
1132
    if (!s->jmp_opt)                                                          \
1133
        gen_op_jz_ecx[s->aflag](l2);                                          \
1134
    gen_jmp(s, cur_eip);                                                      \
1135
}
1136

    
1137
#define GEN_REPZ2(op)                                                         \
1138
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
1139
                                   target_ulong cur_eip,                      \
1140
                                   target_ulong next_eip,                     \
1141
                                   int nz)                                    \
1142
{                                                                             \
1143
    int l2;\
1144
    gen_update_cc_op(s);                                                      \
1145
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
1146
    gen_ ## op(s, ot);                                                        \
1147
    gen_op_dec_ECX[s->aflag]();                                               \
1148
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
1149
    gen_op_string_jnz_sub[nz][ot](l2);\
1150
    if (!s->jmp_opt)                                                          \
1151
        gen_op_jz_ecx[s->aflag](l2);                                          \
1152
    gen_jmp(s, cur_eip);                                                      \
1153
}
1154

    
1155
GEN_REPZ(movs)
1156
GEN_REPZ(stos)
1157
GEN_REPZ(lods)
1158
GEN_REPZ(ins)
1159
GEN_REPZ(outs)
1160
GEN_REPZ2(scas)
1161
GEN_REPZ2(cmps)
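
/*
 * For reference, GEN_REPZ(movs) above expands to a helper of roughly the
 * following shape (illustrative expansion, not literal file contents); the
 * GEN_REPZ2 variants additionally set CC_OP_SUBB + ot and test ZF via
 * gen_op_string_jnz_sub to implement the repz/repnz termination condition:
 *
 *   static inline void gen_repz_movs(DisasContext *s, int ot,
 *                                    target_ulong cur_eip, target_ulong next_eip)
 *   {
 *       int l2;
 *       gen_update_cc_op(s);
 *       l2 = gen_jz_ecx_string(s, next_eip);   // if ECX == 0, chain to the next insn
 *       gen_movs(s, ot);                       // one string iteration
 *       gen_op_dec_ECX[s->aflag]();
 *       if (!s->jmp_opt)                       // without block chaining, re-test ECX here
 *           gen_op_jz_ecx[s->aflag](l2);
 *       gen_jmp(s, cur_eip);                   // loop by jumping back to the insn itself
 *   }
 */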
1162

    
1163
enum {
1164
    JCC_O,
1165
    JCC_B,
1166
    JCC_Z,
1167
    JCC_BE,
1168
    JCC_S,
1169
    JCC_P,
1170
    JCC_L,
1171
    JCC_LE,
1172
};
1173

    
1174
static GenOpFunc1 *gen_jcc_sub[4][8] = {
1175
    [OT_BYTE] = {
1176
        NULL,
1177
        gen_op_jb_subb,
1178
        gen_op_jz_subb,
1179
        gen_op_jbe_subb,
1180
        gen_op_js_subb,
1181
        NULL,
1182
        gen_op_jl_subb,
1183
        gen_op_jle_subb,
1184
    },
1185
    [OT_WORD] = {
1186
        NULL,
1187
        gen_op_jb_subw,
1188
        gen_op_jz_subw,
1189
        gen_op_jbe_subw,
1190
        gen_op_js_subw,
1191
        NULL,
1192
        gen_op_jl_subw,
1193
        gen_op_jle_subw,
1194
    },
1195
    [OT_LONG] = {
1196
        NULL,
1197
        gen_op_jb_subl,
1198
        gen_op_jz_subl,
1199
        gen_op_jbe_subl,
1200
        gen_op_js_subl,
1201
        NULL,
1202
        gen_op_jl_subl,
1203
        gen_op_jle_subl,
1204
    },
1205
#ifdef TARGET_X86_64
1206
    [OT_QUAD] = {
1207
        NULL,
1208
        BUGGY_64(gen_op_jb_subq),
1209
        gen_op_jz_subq,
1210
        BUGGY_64(gen_op_jbe_subq),
1211
        gen_op_js_subq,
1212
        NULL,
1213
        BUGGY_64(gen_op_jl_subq),
1214
        BUGGY_64(gen_op_jle_subq),
1215
    },
1216
#endif
1217
};
1218
static GenOpFunc1 *gen_op_loop[3][4] = {
1219
    [0] = {
1220
        gen_op_loopnzw,
1221
        gen_op_loopzw,
1222
        gen_op_jnz_ecxw,
1223
    },
1224
    [1] = {
1225
        gen_op_loopnzl,
1226
        gen_op_loopzl,
1227
        gen_op_jnz_ecxl,
1228
    },
1229
#ifdef TARGET_X86_64
1230
    [2] = {
1231
        gen_op_loopnzq,
1232
        gen_op_loopzq,
1233
        gen_op_jnz_ecxq,
1234
    },
1235
#endif
1236
};
1237

    
1238
static GenOpFunc *gen_setcc_slow[8] = {
1239
    gen_op_seto_T0_cc,
1240
    gen_op_setb_T0_cc,
1241
    gen_op_setz_T0_cc,
1242
    gen_op_setbe_T0_cc,
1243
    gen_op_sets_T0_cc,
1244
    gen_op_setp_T0_cc,
1245
    gen_op_setl_T0_cc,
1246
    gen_op_setle_T0_cc,
1247
};
1248

    
1249
static GenOpFunc *gen_setcc_sub[4][8] = {
1250
    [OT_BYTE] = {
1251
        NULL,
1252
        gen_op_setb_T0_subb,
1253
        gen_op_setz_T0_subb,
1254
        gen_op_setbe_T0_subb,
1255
        gen_op_sets_T0_subb,
1256
        NULL,
1257
        gen_op_setl_T0_subb,
1258
        gen_op_setle_T0_subb,
1259
    },
1260
    [OT_WORD] = {
1261
        NULL,
1262
        gen_op_setb_T0_subw,
1263
        gen_op_setz_T0_subw,
1264
        gen_op_setbe_T0_subw,
1265
        gen_op_sets_T0_subw,
1266
        NULL,
1267
        gen_op_setl_T0_subw,
1268
        gen_op_setle_T0_subw,
1269
    },
1270
    [OT_LONG] = {
1271
        NULL,
1272
        gen_op_setb_T0_subl,
1273
        gen_op_setz_T0_subl,
1274
        gen_op_setbe_T0_subl,
1275
        gen_op_sets_T0_subl,
1276
        NULL,
1277
        gen_op_setl_T0_subl,
1278
        gen_op_setle_T0_subl,
1279
    },
1280
#ifdef TARGET_X86_64
1281
    [OT_QUAD] = {
1282
        NULL,
1283
        gen_op_setb_T0_subq,
1284
        gen_op_setz_T0_subq,
1285
        gen_op_setbe_T0_subq,
1286
        gen_op_sets_T0_subq,
1287
        NULL,
1288
        gen_op_setl_T0_subq,
1289
        gen_op_setle_T0_subq,
1290
    },
1291
#endif
1292
};
1293

    
1294
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
1295
    gen_op_fadd_ST0_FT0,
1296
    gen_op_fmul_ST0_FT0,
1297
    gen_op_fcom_ST0_FT0,
1298
    gen_op_fcom_ST0_FT0,
1299
    gen_op_fsub_ST0_FT0,
1300
    gen_op_fsubr_ST0_FT0,
1301
    gen_op_fdiv_ST0_FT0,
1302
    gen_op_fdivr_ST0_FT0,
1303
};
1304

    
1305
/* NOTE the exception in "r" op ordering */
1306
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
1307
    gen_op_fadd_STN_ST0,
1308
    gen_op_fmul_STN_ST0,
1309
    NULL,
1310
    NULL,
1311
    gen_op_fsubr_STN_ST0,
1312
    gen_op_fsub_STN_ST0,
1313
    gen_op_fdivr_STN_ST0,
1314
    gen_op_fdiv_STN_ST0,
1315
};
1316

    
1317
/* if d == OR_TMP0, it means memory operand (address in A0) */
1318
static void gen_op(DisasContext *s1, int op, int ot, int d)
1319
{
1320
    GenOpFunc *gen_update_cc;
1321
    
1322
    if (d != OR_TMP0) {
1323
        gen_op_mov_TN_reg[ot][0][d]();
1324
    } else {
1325
        gen_op_ld_T0_A0[ot + s1->mem_index]();
1326
    }
1327
    switch(op) {
1328
    case OP_ADCL:
1329
    case OP_SBBL:
1330
        if (s1->cc_op != CC_OP_DYNAMIC)
1331
            gen_op_set_cc_op(s1->cc_op);
1332
        if (d != OR_TMP0) {
1333
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
1334
            gen_op_mov_reg_T0[ot][d]();
1335
        } else {
1336
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
1337
        }
1338
        s1->cc_op = CC_OP_DYNAMIC;
1339
        goto the_end;
1340
    case OP_ADDL:
1341
        gen_op_addl_T0_T1();
1342
        s1->cc_op = CC_OP_ADDB + ot;
1343
        gen_update_cc = gen_op_update2_cc;
1344
        break;
1345
    case OP_SUBL:
1346
        gen_op_subl_T0_T1();
1347
        s1->cc_op = CC_OP_SUBB + ot;
1348
        gen_update_cc = gen_op_update2_cc;
1349
        break;
1350
    default:
1351
    case OP_ANDL:
1352
    case OP_ORL:
1353
    case OP_XORL:
1354
        gen_op_arith_T0_T1_cc[op]();
1355
        s1->cc_op = CC_OP_LOGICB + ot;
1356
        gen_update_cc = gen_op_update1_cc;
1357
        break;
1358
    case OP_CMPL:
1359
        gen_op_cmpl_T0_T1_cc();
1360
        s1->cc_op = CC_OP_SUBB + ot;
1361
        gen_update_cc = NULL;
1362
        break;
1363
    }
1364
    if (op != OP_CMPL) {
1365
        if (d != OR_TMP0)
1366
            gen_op_mov_reg_T0[ot][d]();
1367
        else
1368
            gen_op_st_T0_A0[ot + s1->mem_index]();
1369
    }
1370
    /* the flags update must happen after the memory write (precise
1371
       exception support) */
1372
    if (gen_update_cc)
1373
        gen_update_cc();
1374
 the_end: ;
1375
}
1376

    
1377
/* if d == OR_TMP0, it means memory operand (address in A0) */
1378
static void gen_inc(DisasContext *s1, int ot, int d, int c)
1379
{
1380
    if (d != OR_TMP0)
1381
        gen_op_mov_TN_reg[ot][0][d]();
1382
    else
1383
        gen_op_ld_T0_A0[ot + s1->mem_index]();
1384
    if (s1->cc_op != CC_OP_DYNAMIC)
1385
        gen_op_set_cc_op(s1->cc_op);
1386
    if (c > 0) {
1387
        gen_op_incl_T0();
1388
        s1->cc_op = CC_OP_INCB + ot;
1389
    } else {
1390
        gen_op_decl_T0();
1391
        s1->cc_op = CC_OP_DECB + ot;
1392
    }
1393
    if (d != OR_TMP0)
1394
        gen_op_mov_reg_T0[ot][d]();
1395
    else
1396
        gen_op_st_T0_A0[ot + s1->mem_index]();
1397
    gen_op_update_inc_cc();
1398
}
1399

    
1400
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1401
{
1402
    if (d != OR_TMP0)
1403
        gen_op_mov_TN_reg[ot][0][d]();
1404
    else
1405
        gen_op_ld_T0_A0[ot + s1->mem_index]();
1406
    if (s != OR_TMP1)
1407
        gen_op_mov_TN_reg[ot][1][s]();
1408
    /* for zero counts, flags are not updated, so must do it dynamically */
1409
    if (s1->cc_op != CC_OP_DYNAMIC)
1410
        gen_op_set_cc_op(s1->cc_op);
1411
    
1412
    if (d != OR_TMP0)
1413
        gen_op_shift_T0_T1_cc[ot][op]();
1414
    else
1415
        gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
1416
    if (d != OR_TMP0)
1417
        gen_op_mov_reg_T0[ot][d]();
1418
    s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1419
}
1420

    
1421
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
1422
{
1423
    /* currently not optimized */
1424
    gen_op_movl_T1_im(c);
1425
    gen_shift(s1, op, ot, d, OR_TMP1);
1426
}
1427

    
1428
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1429
{
1430
    target_long disp;
1431
    int havesib;
1432
    int base;
1433
    int index;
1434
    int scale;
1435
    int opreg;
1436
    int mod, rm, code, override, must_add_seg;
1437

    
1438
    override = s->override;
1439
    must_add_seg = s->addseg;
1440
    if (override >= 0)
1441
        must_add_seg = 1;
1442
    mod = (modrm >> 6) & 3;
1443
    rm = modrm & 7;
1444

    
1445
    if (s->aflag) {
1446

    
1447
        havesib = 0;
1448
        base = rm;
1449
        index = 0;
1450
        scale = 0;
1451
        
1452
        if (base == 4) {
1453
            havesib = 1;
1454
            code = ldub_code(s->pc++);
1455
            scale = (code >> 6) & 3;
1456
            index = ((code >> 3) & 7) | REX_X(s);
1457
            base = (code & 7);
1458
        }
1459
        base |= REX_B(s);
1460

    
1461
        switch (mod) {
1462
        case 0:
1463
            if ((base & 7) == 5) {
1464
                base = -1;
1465
                disp = (int32_t)ldl_code(s->pc);
1466
                s->pc += 4;
1467
                if (CODE64(s) && !havesib) {
1468
                    disp += s->pc + s->rip_offset;
1469
                }
1470
            } else {
1471
                disp = 0;
1472
            }
1473
            break;
1474
        case 1:
1475
            disp = (int8_t)ldub_code(s->pc++);
1476
            break;
1477
        default:
1478
        case 2:
1479
            disp = ldl_code(s->pc);
1480
            s->pc += 4;
1481
            break;
1482
        }
1483
        
1484
        if (base >= 0) {
1485
            /* for correct popl handling with esp */
1486
            if (base == 4 && s->popl_esp_hack)
1487
                disp += s->popl_esp_hack;
1488
#ifdef TARGET_X86_64
1489
            if (s->aflag == 2) {
1490
                gen_op_movq_A0_reg[base]();
1491
                if (disp != 0) {
1492
                    if ((int32_t)disp == disp)
1493
                        gen_op_addq_A0_im(disp);
1494
                    else
1495
                        gen_op_addq_A0_im64(disp >> 32, disp);
1496
                }
1497
            } else 
1498
#endif
1499
            {
1500
                gen_op_movl_A0_reg[base]();
1501
                if (disp != 0)
1502
                    gen_op_addl_A0_im(disp);
1503
            }
1504
        } else {
1505
#ifdef TARGET_X86_64
1506
            if (s->aflag == 2) {
1507
                if ((int32_t)disp == disp)
1508
                    gen_op_movq_A0_im(disp);
1509
                else
1510
                    gen_op_movq_A0_im64(disp >> 32, disp);
1511
            } else 
1512
#endif
1513
            {
1514
                gen_op_movl_A0_im(disp);
1515
            }
1516
        }
1517
        /* XXX: index == 4 is always invalid */
1518
        if (havesib && (index != 4 || scale != 0)) {
1519
#ifdef TARGET_X86_64
1520
            if (s->aflag == 2) {
1521
                gen_op_addq_A0_reg_sN[scale][index]();
1522
            } else 
1523
#endif
1524
            {
1525
                gen_op_addl_A0_reg_sN[scale][index]();
1526
            }
1527
        }
1528
        if (must_add_seg) {
1529
            if (override < 0) {
1530
                if (base == R_EBP || base == R_ESP)
1531
                    override = R_SS;
1532
                else
1533
                    override = R_DS;
1534
            }
1535
#ifdef TARGET_X86_64
1536
            if (s->aflag == 2) {
1537
                gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
1538
            } else 
1539
#endif
1540
            {
1541
                gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1542
            }
1543
        }
1544
    } else {
1545
        switch (mod) {
1546
        case 0:
1547
            if (rm == 6) {
1548
                disp = lduw_code(s->pc);
1549
                s->pc += 2;
1550
                gen_op_movl_A0_im(disp);
1551
                rm = 0; /* avoid SS override */
1552
                goto no_rm;
1553
            } else {
1554
                disp = 0;
1555
            }
1556
            break;
1557
        case 1:
1558
            disp = (int8_t)ldub_code(s->pc++);
1559
            break;
1560
        default:
1561
        case 2:
1562
            disp = lduw_code(s->pc);
1563
            s->pc += 2;
1564
            break;
1565
        }
1566
        switch(rm) {
1567
        case 0:
1568
            gen_op_movl_A0_reg[R_EBX]();
1569
            gen_op_addl_A0_reg_sN[0][R_ESI]();
1570
            break;
1571
        case 1:
1572
            gen_op_movl_A0_reg[R_EBX]();
1573
            gen_op_addl_A0_reg_sN[0][R_EDI]();
1574
            break;
1575
        case 2:
1576
            gen_op_movl_A0_reg[R_EBP]();
1577
            gen_op_addl_A0_reg_sN[0][R_ESI]();
1578
            break;
1579
        case 3:
1580
            gen_op_movl_A0_reg[R_EBP]();
1581
            gen_op_addl_A0_reg_sN[0][R_EDI]();
1582
            break;
1583
        case 4:
1584
            gen_op_movl_A0_reg[R_ESI]();
1585
            break;
1586
        case 5:
1587
            gen_op_movl_A0_reg[R_EDI]();
1588
            break;
1589
        case 6:
1590
            gen_op_movl_A0_reg[R_EBP]();
1591
            break;
1592
        default:
1593
        case 7:
1594
            gen_op_movl_A0_reg[R_EBX]();
1595
            break;
1596
        }
1597
        if (disp != 0)
1598
            gen_op_addl_A0_im(disp);
1599
        gen_op_andl_A0_ffff();
1600
    no_rm:
1601
        if (must_add_seg) {
1602
            if (override < 0) {
1603
                if (rm == 2 || rm == 3 || rm == 6)
1604
                    override = R_SS;
1605
                else
1606
                    override = R_DS;
1607
            }
1608
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1609
        }
1610
    }
1611

    
1612
    opreg = OR_A0;
1613
    disp = 0;
1614
    *reg_ptr = opreg;
1615
    *offset_ptr = disp;
1616
}
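
/*
 * Worked example (illustration, not part of the original file): for the
 * 32 bit addressing form  mov eax, [ebx + esi*4 + 0x10]  the ModRM byte is
 * 0x44 (mod=01, reg=000, rm=100 -> SIB byte follows), the SIB byte is 0xb3
 * (scale=10, index=110=ESI, base=011=EBX), followed by the 8 bit
 * displacement 0x10.  gen_lea_modrm() then emits, in order:
 *   gen_op_movl_A0_reg[R_EBX]();
 *   gen_op_addl_A0_im(0x10);
 *   gen_op_addl_A0_reg_sN[2][R_ESI]();
 *   gen_op_addl_A0_seg(DS base)   -- only if a segment base must be added
 */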
1617

    
1618
/* used for LEA and MOV AX, mem */
1619
static void gen_add_A0_ds_seg(DisasContext *s)
1620
{
1621
    int override, must_add_seg;
1622
    must_add_seg = s->addseg;
1623
    override = R_DS;
1624
    if (s->override >= 0) {
1625
        override = s->override;
1626
        must_add_seg = 1;
1627
    } else {
1628
        override = R_DS;
1629
    }
1630
    if (must_add_seg) {
1631
#ifdef TARGET_X86_64
1632
        if (CODE64(s)) {
1633
            gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
1634
        } else 
1635
#endif
1636
        {
1637
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1638
        }
1639
    }
1640
}
1641

    
1642
/* generate modrm memory load or store of 'reg'. TMP0 is used as the
   value if reg == OR_TMP0 */
1644
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1645
{
1646
    int mod, rm, opreg, disp;
1647

    
1648
    mod = (modrm >> 6) & 3;
1649
    rm = (modrm & 7) | REX_B(s);
1650
    if (mod == 3) {
1651
        if (is_store) {
1652
            if (reg != OR_TMP0)
1653
                gen_op_mov_TN_reg[ot][0][reg]();
1654
            gen_op_mov_reg_T0[ot][rm]();
1655
        } else {
1656
            gen_op_mov_TN_reg[ot][0][rm]();
1657
            if (reg != OR_TMP0)
1658
                gen_op_mov_reg_T0[ot][reg]();
1659
        }
1660
    } else {
1661
        gen_lea_modrm(s, modrm, &opreg, &disp);
1662
        if (is_store) {
1663
            if (reg != OR_TMP0)
1664
                gen_op_mov_TN_reg[ot][0][reg]();
1665
            gen_op_st_T0_A0[ot + s->mem_index]();
1666
        } else {
1667
            gen_op_ld_T0_A0[ot + s->mem_index]();
1668
            if (reg != OR_TMP0)
1669
                gen_op_mov_reg_T0[ot][reg]();
1670
        }
1671
    }
1672
}
1673

    
1674
static inline uint32_t insn_get(DisasContext *s, int ot)
1675
{
1676
    uint32_t ret;
1677

    
1678
    switch(ot) {
1679
    case OT_BYTE:
1680
        ret = ldub_code(s->pc);
1681
        s->pc++;
1682
        break;
1683
    case OT_WORD:
1684
        ret = lduw_code(s->pc);
1685
        s->pc += 2;
1686
        break;
1687
    default:
1688
    case OT_LONG:
1689
        ret = ldl_code(s->pc);
1690
        s->pc += 4;
1691
        break;
1692
    }
1693
    return ret;
1694
}
1695

    
1696
static inline int insn_const_size(unsigned int ot)
1697
{
1698
    if (ot <= OT_LONG)
1699
        return 1 << ot;
1700
    else
1701
        return 4;
1702
}
1703

    
1704
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
1705
{
1706
    TranslationBlock *tb;
1707
    target_ulong pc;
1708

    
1709
    pc = s->cs_base + eip;
1710
    tb = s->tb;
1711
    /* NOTE: we handle the case where the TB spans two pages here */
1712
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
1713
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))  {
1714
        /* jump to same page: we can use a direct jump */
1715
        if (tb_num == 0)
1716
            gen_op_goto_tb0(TBPARAM(tb));
1717
        else
1718
            gen_op_goto_tb1(TBPARAM(tb));
1719
        gen_jmp_im(eip);
1720
        gen_op_movl_T0_im((long)tb + tb_num);
1721
        gen_op_exit_tb();
1722
    } else {
1723
        /* jump to another page: currently not optimized */
1724
        gen_jmp_im(eip);
1725
        gen_eob(s);
1726
    }
1727
}
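
/* Direct block chaining: if the jump target lies on the same guest page as
   the current TB, gen_op_goto_tb0/tb1 emit a direct jump that can later be
   linked to the target TB, and exit_tb returns tb + tb_num so the execution
   loop knows which of the two exits was taken.  A cross-page jump simply
   updates EIP and ends the block (gen_eob), since the target page mapping
   may differ. */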
1728

    
1729
static inline void gen_jcc(DisasContext *s, int b, 
1730
                           target_ulong val, target_ulong next_eip)
1731
{
1732
    TranslationBlock *tb;
1733
    int inv, jcc_op;
1734
    GenOpFunc1 *func;
1735
    target_ulong tmp;
1736
    int l1, l2;
1737

    
1738
    inv = b & 1;
1739
    jcc_op = (b >> 1) & 7;
1740
    
1741
    if (s->jmp_opt) {
1742
        switch(s->cc_op) {
1743
            /* we optimize the cmp/jcc case */
1744
        case CC_OP_SUBB:
1745
        case CC_OP_SUBW:
1746
        case CC_OP_SUBL:
1747
        case CC_OP_SUBQ:
1748
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1749
            break;
1750
            
1751
            /* some jumps are easy to compute */
1752
        case CC_OP_ADDB:
1753
        case CC_OP_ADDW:
1754
        case CC_OP_ADDL:
1755
        case CC_OP_ADDQ:
1756

    
1757
        case CC_OP_ADCB:
1758
        case CC_OP_ADCW:
1759
        case CC_OP_ADCL:
1760
        case CC_OP_ADCQ:
1761

    
1762
        case CC_OP_SBBB:
1763
        case CC_OP_SBBW:
1764
        case CC_OP_SBBL:
1765
        case CC_OP_SBBQ:
1766

    
1767
        case CC_OP_LOGICB:
1768
        case CC_OP_LOGICW:
1769
        case CC_OP_LOGICL:
1770
        case CC_OP_LOGICQ:
1771

    
1772
        case CC_OP_INCB:
1773
        case CC_OP_INCW:
1774
        case CC_OP_INCL:
1775
        case CC_OP_INCQ:
1776

    
1777
        case CC_OP_DECB:
1778
        case CC_OP_DECW:
1779
        case CC_OP_DECL:
1780
        case CC_OP_DECQ:
1781

    
1782
        case CC_OP_SHLB:
1783
        case CC_OP_SHLW:
1784
        case CC_OP_SHLL:
1785
        case CC_OP_SHLQ:
1786

    
1787
        case CC_OP_SARB:
1788
        case CC_OP_SARW:
1789
        case CC_OP_SARL:
1790
        case CC_OP_SARQ:
1791
            switch(jcc_op) {
1792
            case JCC_Z:
1793
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1794
                break;
1795
            case JCC_S:
1796
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1797
                break;
1798
            default:
1799
                func = NULL;
1800
                break;
1801
            }
1802
            break;
1803
        default:
1804
            func = NULL;
1805
            break;
1806
        }
1807

    
1808
        if (s->cc_op != CC_OP_DYNAMIC) {
1809
            gen_op_set_cc_op(s->cc_op);
1810
            s->cc_op = CC_OP_DYNAMIC;
1811
        }
1812

    
1813
        if (!func) {
1814
            gen_setcc_slow[jcc_op]();
1815
            func = gen_op_jnz_T0_label;
1816
        }
1817
    
1818
        if (inv) {
1819
            tmp = val;
1820
            val = next_eip;
1821
            next_eip = tmp;
1822
        }
1823
        tb = s->tb;
1824

    
1825
        l1 = gen_new_label();
1826
        func(l1);
1827

    
1828
        gen_goto_tb(s, 0, next_eip);
1829

    
1830
        gen_set_label(l1);
1831
        gen_goto_tb(s, 1, val);
1832

    
1833
        s->is_jmp = 3;
1834
    } else {
1835

    
1836
        if (s->cc_op != CC_OP_DYNAMIC) {
1837
            gen_op_set_cc_op(s->cc_op);
1838
            s->cc_op = CC_OP_DYNAMIC;
1839
        }
1840
        gen_setcc_slow[jcc_op]();
1841
        if (inv) {
1842
            tmp = val;
1843
            val = next_eip;
1844
            next_eip = tmp;
1845
        }
1846
        l1 = gen_new_label();
1847
        l2 = gen_new_label();
1848
        gen_op_jnz_T0_label(l1);
1849
        gen_jmp_im(next_eip);
1850
        gen_op_jmp_label(l2);
1851
        gen_set_label(l1);
1852
        gen_jmp_im(val);
1853
        gen_set_label(l2);
1854
        gen_eob(s);
1855
    }
1856
}
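
/* Fast path for the common cmp/jcc pairing: when cc_op records a SUB (i.e.
   a compare), gen_jcc_sub can branch directly on the saved operands without
   materialising EFLAGS.  For ADD/ADC/SBB/LOGIC/INC/DEC/SHL/SAR results only
   the Z and S conditions can be tested this way; every other case falls
   back to gen_setcc_slow followed by a jump on T0. */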
1857

    
1858
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        switch(jcc_op) {
        case JCC_Z:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}

/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}

static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        if (addend == 8)
            gen_op_addq_ESP_8();
        else
            gen_op_addq_ESP_im(addend);
    } else
#endif
    if (s->ss32) {
        if (addend == 2)
            gen_op_addl_ESP_2();
        else if (addend == 4)
            gen_op_addl_ESP_4();
        else
            gen_op_addl_ESP_im(addend);
    } else {
        if (addend == 2)
            gen_op_addw_ESP_2();
        else if (addend == 4)
            gen_op_addw_ESP_4();
        else
            gen_op_addw_ESP_im(addend);
    }
}

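/* note: in the push helpers below, ESP is only written back after the
   store has succeeded, so a faulting push leaves ESP unchanged (same
   idea as the two step pop further down) */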
/* generate a push. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg[R_ESP]();
        if (s->dflag) {
            gen_op_subq_A0_8();
            gen_op_st_T0_A0[OT_QUAD + s->mem_index]();
        } else {
            gen_op_subq_A0_2();
            gen_op_st_T0_A0[OT_WORD + s->mem_index]();
        }
        gen_op_movq_ESP_A0();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (!s->dflag)
            gen_op_subl_A0_2();
        else
            gen_op_subl_A0_4();
        if (s->ss32) {
            if (s->addseg) {
                gen_op_movl_T1_A0();
                gen_op_addl_A0_SS();
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_SS();
        }
        gen_op_st_T0_A0[s->dflag + 1 + s->mem_index]();
        if (s->ss32 && !s->addseg)
            gen_op_movl_ESP_A0();
        else
            gen_op_mov_reg_T1[s->ss32 + 1][R_ESP]();
    }
}

/* generate a push. It depends on ss32, addseg and dflag */
/* slower version for T1, only used for call Ev */
static void gen_push_T1(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg[R_ESP]();
        if (s->dflag) {
            gen_op_subq_A0_8();
            gen_op_st_T1_A0[OT_QUAD + s->mem_index]();
        } else {
            gen_op_subq_A0_2();
            gen_op_st_T1_A0[OT_WORD + s->mem_index]();
        }
        gen_op_movq_ESP_A0();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (!s->dflag)
            gen_op_subl_A0_2();
        else
            gen_op_subl_A0_4();
        if (s->ss32) {
            if (s->addseg) {
                gen_op_addl_A0_SS();
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_SS();
        }
        gen_op_st_T1_A0[s->dflag + 1 + s->mem_index]();

        if (s->ss32 && !s->addseg)
            gen_op_movl_ESP_A0();
        else
            gen_stack_update(s, (-2) << s->dflag);
    }
}

/* two step pop is necessary for precise exceptions */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg[R_ESP]();
        gen_op_ld_T0_A0[(s->dflag ? OT_QUAD : OT_WORD) + s->mem_index]();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_SS();
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_SS();
        }
        gen_op_ld_T0_A0[s->dflag + 1 + s->mem_index]();
    }
}

static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}

static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
}

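/* PUSHA stores EDI at the lowest address and EAX at the highest, hence
   the "7 - i" register order while A0 walks upwards */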
/* NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    gen_op_addl_A0_im(-16 << s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
        gen_op_st_T0_A0[OT_WORD + s->dflag + s->mem_index]();
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}

/* NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    gen_op_addl_T1_im(16 << s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0[OT_WORD + s->dflag + s->mem_index]();
            gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}

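/* ENTER: push EBP, make EBP point at the new frame, then reserve
   esp_addend bytes of locals plus one stack slot per nesting level */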
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_ESP();
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();

        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            gen_op_enter64_level(level, (ot == OT_QUAD));
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_QUAD][R_ESP]();
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_ESP();
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        gen_op_movl_T1_A0();
        if (s->addseg)
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            gen_op_enter_level(level, s->dflag);
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
    }
}

static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;
}

/* an interrupt is different from an exception because of the
   privilege checks */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3;
}

static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;
}

/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        gen_op_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        gen_op_debug();
    } else if (s->tf) {
        gen_op_raise_exception(EXCP01_SSTP);
    } else {
        gen_op_movl_T0_0();
        gen_op_exit_tb();
    }
    s->is_jmp = 3;
}

/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}

static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}

static void gen_movtl_T0_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val == val) {
        gen_op_movl_T0_im(val);
    } else {
        gen_op_movq_T0_im64(val >> 32, val);
    }
#else
    gen_op_movl_T0_im(val);
#endif
}

static void gen_movtl_T1_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val == val) {
        gen_op_movl_T1_im(val);
    } else {
        gen_op_movq_T1_im64(val >> 32, val);
    }
#else
    gen_op_movl_T1_im(val);
#endif
}

static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}

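/* 64/128 bit env load/store helpers, indexed by s->mem_index >> 2
   (raw, kernel and user access variants) */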
static GenOpFunc1 *gen_ldq_env_A0[3] = {
    gen_op_ldq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldq_kernel_env_A0,
    gen_op_ldq_user_env_A0,
#endif
};

static GenOpFunc1 *gen_stq_env_A0[3] = {
    gen_op_stq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_stq_kernel_env_A0,
    gen_op_stq_user_env_A0,
#endif
};

static GenOpFunc1 *gen_ldo_env_A0[3] = {
    gen_op_ldo_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldo_kernel_env_A0,
    gen_op_ldo_user_env_A0,
#endif
};

static GenOpFunc1 *gen_sto_env_A0[3] = {
    gen_op_sto_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_sto_kernel_env_A0,
    gen_op_sto_user_env_A0,
#endif
};

#define SSE_SPECIAL ((GenOpFunc2 *)1)

#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }

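/* sse_op_table1 is indexed by the opcode byte and by the mandatory
   prefix: 0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2 (see b1 in gen_sse);
   entries equal to SSE_SPECIAL are decoded by hand in gen_sse */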
static GenOpFunc2 *sse_op_table1[256][4] = {
2335
    /* pure SSE operations */
2336
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2337
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2338
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
2339
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
2340
    [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
2341
    [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
2342
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
2343
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */
2344

    
2345
    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
2346
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
2347
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2348
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
2349
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2350
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2351
    [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
2352
    [0x2f] = { gen_op_comiss, gen_op_comisd },
2353
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2354
    [0x51] = SSE_FOP(sqrt),
2355
    [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
2356
    [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
2357
    [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
2358
    [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
2359
    [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
2360
    [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
2361
    [0x58] = SSE_FOP(add),
2362
    [0x59] = SSE_FOP(mul),
2363
    [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps, 
2364
               gen_op_cvtss2sd, gen_op_cvtsd2ss },
2365
    [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
2366
    [0x5c] = SSE_FOP(sub),
2367
    [0x5d] = SSE_FOP(min),
2368
    [0x5e] = SSE_FOP(div),
2369
    [0x5f] = SSE_FOP(max),
2370

    
2371
    [0xc2] = SSE_FOP(cmpeq),
2372
    [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },
2373

    
2374
    /* MMX ops and their SSE extensions */
2375
    [0x60] = MMX_OP2(punpcklbw),
2376
    [0x61] = MMX_OP2(punpcklwd),
2377
    [0x62] = MMX_OP2(punpckldq),
2378
    [0x63] = MMX_OP2(packsswb),
2379
    [0x64] = MMX_OP2(pcmpgtb),
2380
    [0x65] = MMX_OP2(pcmpgtw),
2381
    [0x66] = MMX_OP2(pcmpgtl),
2382
    [0x67] = MMX_OP2(packuswb),
2383
    [0x68] = MMX_OP2(punpckhbw),
2384
    [0x69] = MMX_OP2(punpckhwd),
2385
    [0x6a] = MMX_OP2(punpckhdq),
2386
    [0x6b] = MMX_OP2(packssdw),
2387
    [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
2388
    [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
2389
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2390
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2391
    [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx, 
2392
               (GenOpFunc2 *)gen_op_pshufd_xmm, 
2393
               (GenOpFunc2 *)gen_op_pshufhw_xmm, 
2394
               (GenOpFunc2 *)gen_op_pshuflw_xmm },
2395
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2396
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2397
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2398
    [0x74] = MMX_OP2(pcmpeqb),
2399
    [0x75] = MMX_OP2(pcmpeqw),
2400
    [0x76] = MMX_OP2(pcmpeql),
2401
    [0x77] = { SSE_SPECIAL }, /* emms */
2402
    [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
2403
    [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
2404
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
2405
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2406
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
2407
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
2408
    [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
2409
    [0xd1] = MMX_OP2(psrlw),
2410
    [0xd2] = MMX_OP2(psrld),
2411
    [0xd3] = MMX_OP2(psrlq),
2412
    [0xd4] = MMX_OP2(paddq),
2413
    [0xd5] = MMX_OP2(pmullw),
2414
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
2415
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
2416
    [0xd8] = MMX_OP2(psubusb),
2417
    [0xd9] = MMX_OP2(psubusw),
2418
    [0xda] = MMX_OP2(pminub),
2419
    [0xdb] = MMX_OP2(pand),
2420
    [0xdc] = MMX_OP2(paddusb),
2421
    [0xdd] = MMX_OP2(paddusw),
2422
    [0xde] = MMX_OP2(pmaxub),
2423
    [0xdf] = MMX_OP2(pandn),
2424
    [0xe0] = MMX_OP2(pavgb),
2425
    [0xe1] = MMX_OP2(psraw),
2426
    [0xe2] = MMX_OP2(psrad),
2427
    [0xe3] = MMX_OP2(pavgw),
2428
    [0xe4] = MMX_OP2(pmulhuw),
2429
    [0xe5] = MMX_OP2(pmulhw),
2430
    [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
2431
    [0xe7] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntq, movntdq */
2432
    [0xe8] = MMX_OP2(psubsb),
2433
    [0xe9] = MMX_OP2(psubsw),
2434
    [0xea] = MMX_OP2(pminsw),
2435
    [0xeb] = MMX_OP2(por),
2436
    [0xec] = MMX_OP2(paddsb),
2437
    [0xed] = MMX_OP2(paddsw),
2438
    [0xee] = MMX_OP2(pmaxsw),
2439
    [0xef] = MMX_OP2(pxor),
2440
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
2441
    [0xf1] = MMX_OP2(psllw),
2442
    [0xf2] = MMX_OP2(pslld),
2443
    [0xf3] = MMX_OP2(psllq),
2444
    [0xf4] = MMX_OP2(pmuludq),
2445
    [0xf5] = MMX_OP2(pmaddwd),
2446
    [0xf6] = MMX_OP2(psadbw),
2447
    [0xf7] = MMX_OP2(maskmov),
2448
    [0xf8] = MMX_OP2(psubb),
2449
    [0xf9] = MMX_OP2(psubw),
2450
    [0xfa] = MMX_OP2(psubl),
2451
    [0xfb] = MMX_OP2(psubq),
2452
    [0xfc] = MMX_OP2(paddb),
2453
    [0xfd] = MMX_OP2(paddw),
2454
    [0xfe] = MMX_OP2(paddl),
2455
};
2456

    
2457
static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, gen_op_psrldq_xmm },
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, gen_op_pslldq_xmm },
};

static GenOpFunc1 *sse_op_table3[4 * 3] = {
    gen_op_cvtsi2ss,
    gen_op_cvtsi2sd,
    X86_64_ONLY(gen_op_cvtsq2ss),
    X86_64_ONLY(gen_op_cvtsq2sd),

    gen_op_cvttss2si,
    gen_op_cvttsd2si,
    X86_64_ONLY(gen_op_cvttss2sq),
    X86_64_ONLY(gen_op_cvttsd2sq),

    gen_op_cvtss2si,
    gen_op_cvtsd2si,
    X86_64_ONLY(gen_op_cvtss2sq),
    X86_64_ONLY(gen_op_cvtsd2sq),
};

static GenOpFunc2 *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};

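/* decode one MMX/SSE instruction: entries marked SSE_SPECIAL in
   sse_op_table1 are handled case by case below, everything else is a
   generic two operand op on the selected MMX or XMM registers */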
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2499
{
2500
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
2501
    int modrm, mod, rm, reg, reg_addr, offset_addr;
2502
    GenOpFunc2 *sse_op2;
2503
    GenOpFunc3 *sse_op3;
2504

    
2505
    b &= 0xff;
2506
    if (s->prefix & PREFIX_DATA) 
2507
        b1 = 1;
2508
    else if (s->prefix & PREFIX_REPZ) 
2509
        b1 = 2;
2510
    else if (s->prefix & PREFIX_REPNZ) 
2511
        b1 = 3;
2512
    else
2513
        b1 = 0;
2514
    sse_op2 = sse_op_table1[b][b1];
2515
    if (!sse_op2) 
2516
        goto illegal_op;
2517
    if (b <= 0x5f || b == 0xc6 || b == 0xc2) {
2518
        is_xmm = 1;
2519
    } else {
2520
        if (b1 == 0) {
2521
            /* MMX case */
2522
            is_xmm = 0;
2523
        } else {
2524
            is_xmm = 1;
2525
        }
2526
    }
2527
    /* simple MMX/SSE operation */
2528
    if (s->flags & HF_TS_MASK) {
2529
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2530
        return;
2531
    }
2532
    if (s->flags & HF_EM_MASK) {
2533
    illegal_op:
2534
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2535
        return;
2536
    }
2537
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2538
        goto illegal_op;
2539
    if (b == 0x77) {
2540
        /* emms */
2541
        gen_op_emms();
2542
        return;
2543
    }
2544
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2545
       the static cpu state) */
2546
    if (!is_xmm) {
2547
        gen_op_enter_mmx();
2548
    }
2549

    
2550
    modrm = ldub_code(s->pc++);
2551
    reg = ((modrm >> 3) & 7);
2552
    if (is_xmm)
2553
        reg |= rex_r;
2554
    mod = (modrm >> 6) & 3;
2555
    if (sse_op2 == SSE_SPECIAL) {
2556
        b |= (b1 << 8);
2557
        switch(b) {
2558
        case 0x0e7: /* movntq */
2559
            if (mod == 3) 
2560
                goto illegal_op;
2561
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2562
            gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2563
            break;
2564
        case 0x1e7: /* movntdq */
2565
        case 0x02b: /* movntps */
2566
        case 0x12b: /* movntpd */
2567
        case 0x3f0: /* lddqu */
2568
            if (mod == 3)
2569
                goto illegal_op;
2570
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2571
            gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2572
            break;
2573
        case 0x6e: /* movd mm, ea */
2574
            gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2575
            gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2576
            break;
2577
        case 0x16e: /* movd xmm, ea */
2578
            gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2579
            gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2580
            break;
2581
        case 0x6f: /* movq mm, ea */
2582
            if (mod != 3) {
2583
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2584
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2585
            } else {
2586
                rm = (modrm & 7);
2587
                gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
2588
                            offsetof(CPUX86State,fpregs[rm].mmx));
2589
            }
2590
            break;
2591
        case 0x010: /* movups */
2592
        case 0x110: /* movupd */
2593
        case 0x028: /* movaps */
2594
        case 0x128: /* movapd */
2595
        case 0x16f: /* movdqa xmm, ea */
2596
        case 0x26f: /* movdqu xmm, ea */
2597
            if (mod != 3) {
2598
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2599
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2600
            } else {
2601
                rm = (modrm & 7) | REX_B(s);
2602
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2603
                            offsetof(CPUX86State,xmm_regs[rm]));
2604
            }
2605
            break;
2606
        case 0x210: /* movss xmm, ea */
2607
            if (mod != 3) {
2608
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2609
                gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2610
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2611
                gen_op_movl_T0_0();
2612
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2613
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2614
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2615
            } else {
2616
                rm = (modrm & 7) | REX_B(s);
2617
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2618
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2619
            }
2620
            break;
2621
        case 0x310: /* movsd xmm, ea */
2622
            if (mod != 3) {
2623
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2624
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2625
                gen_op_movl_T0_0();
2626
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2627
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2628
            } else {
2629
                rm = (modrm & 7) | REX_B(s);
2630
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2631
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2632
            }
2633
            break;
2634
        case 0x012: /* movlps */
2635
        case 0x112: /* movlpd */
2636
            if (mod != 3) {
2637
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2638
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2639
            } else {
2640
                /* movhlps */
2641
                rm = (modrm & 7) | REX_B(s);
2642
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2643
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2644
            }
2645
            break;
2646
        case 0x212: /* movsldup */
2647
            if (mod != 3) {
2648
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2649
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2650
            } else {
2651
                rm = (modrm & 7) | REX_B(s);
2652
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2653
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2654
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2655
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
2656
            }
2657
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2658
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2659
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2660
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2661
            break;
2662
        case 0x312: /* movddup */
2663
            if (mod != 3) {
2664
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2665
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2666
            } else {
2667
                rm = (modrm & 7) | REX_B(s);
2668
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2669
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2670
            }
2671
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2672
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2673
            break;
2674
        case 0x016: /* movhps */
2675
        case 0x116: /* movhpd */
2676
            if (mod != 3) {
2677
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2678
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2679
            } else {
2680
                /* movlhps */
2681
                rm = (modrm & 7) | REX_B(s);
2682
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2683
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2684
            }
2685
            break;
2686
        case 0x216: /* movshdup */
2687
            if (mod != 3) {
2688
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2689
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2690
            } else {
2691
                rm = (modrm & 7) | REX_B(s);
2692
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2693
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
2694
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2695
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
2696
            }
2697
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2698
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2699
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2700
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2701
            break;
2702
        case 0x7e: /* movd ea, mm */
2703
            gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2704
            gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2705
            break;
2706
        case 0x17e: /* movd ea, xmm */
2707
            gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2708
            gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2709
            break;
2710
        case 0x27e: /* movq xmm, ea */
2711
            if (mod != 3) {
2712
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2713
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2714
            } else {
2715
                rm = (modrm & 7) | REX_B(s);
2716
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2717
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2718
            }
2719
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2720
            break;
2721
        case 0x7f: /* movq ea, mm */
2722
            if (mod != 3) {
2723
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2724
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2725
            } else {
2726
                rm = (modrm & 7);
2727
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2728
                            offsetof(CPUX86State,fpregs[reg].mmx));
2729
            }
2730
            break;
2731
        case 0x011: /* movups */
2732
        case 0x111: /* movupd */
2733
        case 0x029: /* movaps */
2734
        case 0x129: /* movapd */
2735
        case 0x17f: /* movdqa ea, xmm */
2736
        case 0x27f: /* movdqu ea, xmm */
2737
            if (mod != 3) {
2738
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2739
                gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2740
            } else {
2741
                rm = (modrm & 7) | REX_B(s);
2742
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
2743
                            offsetof(CPUX86State,xmm_regs[reg]));
2744
            }
2745
            break;
2746
        case 0x211: /* movss ea, xmm */
2747
            if (mod != 3) {
2748
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2749
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2750
                gen_op_st_T0_A0[OT_LONG + s->mem_index]();
2751
            } else {
2752
                rm = (modrm & 7) | REX_B(s);
2753
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
2754
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2755
            }
2756
            break;
2757
        case 0x311: /* movsd ea, xmm */
2758
            if (mod != 3) {
2759
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2760
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2761
            } else {
2762
                rm = (modrm & 7) | REX_B(s);
2763
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2764
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2765
            }
2766
            break;
2767
        case 0x013: /* movlps */
2768
        case 0x113: /* movlpd */
2769
            if (mod != 3) {
2770
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2771
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2772
            } else {
2773
                goto illegal_op;
2774
            }
2775
            break;
2776
        case 0x017: /* movhps */
2777
        case 0x117: /* movhpd */
2778
            if (mod != 3) {
2779
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2780
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2781
            } else {
2782
                goto illegal_op;
2783
            }
2784
            break;
2785
        case 0x71: /* shift mm, im */
2786
        case 0x72:
2787
        case 0x73:
2788
        case 0x171: /* shift xmm, im */
2789
        case 0x172:
2790
        case 0x173:
2791
            val = ldub_code(s->pc++);
2792
            if (is_xmm) {
2793
                gen_op_movl_T0_im(val);
2794
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2795
                gen_op_movl_T0_0();
2796
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
2797
                op1_offset = offsetof(CPUX86State,xmm_t0);
2798
            } else {
2799
                gen_op_movl_T0_im(val);
2800
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
2801
                gen_op_movl_T0_0();
2802
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
2803
                op1_offset = offsetof(CPUX86State,mmx_t0);
2804
            }
2805
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
2806
            if (!sse_op2)
2807
                goto illegal_op;
2808
            if (is_xmm) {
2809
                rm = (modrm & 7) | REX_B(s);
2810
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2811
            } else {
2812
                rm = (modrm & 7);
2813
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2814
            }
2815
            sse_op2(op2_offset, op1_offset);
2816
            break;
2817
        case 0x050: /* movmskps */
2818
            rm = (modrm & 7) | REX_B(s);
2819
            gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
2820
            gen_op_mov_reg_T0[OT_LONG][reg]();
2821
            break;
2822
        case 0x150: /* movmskpd */
2823
            rm = (modrm & 7) | REX_B(s);
2824
            gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
2825
            gen_op_mov_reg_T0[OT_LONG][reg]();
2826
            break;
2827
        case 0x02a: /* cvtpi2ps */
2828
        case 0x12a: /* cvtpi2pd */
2829
            gen_op_enter_mmx();
2830
            if (mod != 3) {
2831
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2832
                op2_offset = offsetof(CPUX86State,mmx_t0);
2833
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
2834
            } else {
2835
                rm = (modrm & 7);
2836
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2837
            }
2838
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2839
            switch(b >> 8) {
2840
            case 0x0:
2841
                gen_op_cvtpi2ps(op1_offset, op2_offset);
2842
                break;
2843
            default:
2844
            case 0x1:
2845
                gen_op_cvtpi2pd(op1_offset, op2_offset);
2846
                break;
2847
            }
2848
            break;
2849
        case 0x22a: /* cvtsi2ss */
2850
        case 0x32a: /* cvtsi2sd */
2851
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
2852
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
2853
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2854
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
2855
            break;
2856
        case 0x02c: /* cvttps2pi */
2857
        case 0x12c: /* cvttpd2pi */
2858
        case 0x02d: /* cvtps2pi */
2859
        case 0x12d: /* cvtpd2pi */
2860
            gen_op_enter_mmx();
2861
            if (mod != 3) {
2862
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2863
                op2_offset = offsetof(CPUX86State,xmm_t0);
2864
                gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
2865
            } else {
2866
                rm = (modrm & 7) | REX_B(s);
2867
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2868
            }
2869
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
2870
            switch(b) {
2871
            case 0x02c:
2872
                gen_op_cvttps2pi(op1_offset, op2_offset);
2873
                break;
2874
            case 0x12c:
2875
                gen_op_cvttpd2pi(op1_offset, op2_offset);
2876
                break;
2877
            case 0x02d:
2878
                gen_op_cvtps2pi(op1_offset, op2_offset);
2879
                break;
2880
            case 0x12d:
2881
                gen_op_cvtpd2pi(op1_offset, op2_offset);
2882
                break;
2883
            }
2884
            break;
2885
        case 0x22c: /* cvttss2si */
2886
        case 0x32c: /* cvttsd2si */
2887
        case 0x22d: /* cvtss2si */
2888
        case 0x32d: /* cvtsd2si */
2889
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
2890
            if (mod != 3) {
2891
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2892
                if ((b >> 8) & 1) {
2893
                    gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
2894
                } else {
2895
                    gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2896
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2897
                }
2898
                op2_offset = offsetof(CPUX86State,xmm_t0);
2899
            } else {
2900
                rm = (modrm & 7) | REX_B(s);
2901
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2902
            }
2903
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 + 
2904
                          (b & 1) * 4](op2_offset);
2905
            gen_op_mov_reg_T0[ot][reg]();
2906
            break;
2907
        case 0xc4: /* pinsrw */
2908
        case 0x1c4: 
2909
            s->rip_offset = 1;
2910
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
2911
            val = ldub_code(s->pc++);
2912
            if (b1) {
2913
                val &= 7;
2914
                gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
2915
            } else {
2916
                val &= 3;
2917
                gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
2918
            }
2919
            break;
2920
        case 0xc5: /* pextrw */
2921
        case 0x1c5: 
2922
            if (mod != 3)
2923
                goto illegal_op;
2924
            val = ldub_code(s->pc++);
2925
            if (b1) {
2926
                val &= 7;
2927
                rm = (modrm & 7) | REX_B(s);
2928
                gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
2929
            } else {
2930
                val &= 3;
2931
                rm = (modrm & 7);
2932
                gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
2933
            }
2934
            reg = ((modrm >> 3) & 7) | rex_r;
2935
            gen_op_mov_reg_T0[OT_LONG][reg]();
2936
            break;
2937
        case 0x1d6: /* movq ea, xmm */
2938
            if (mod != 3) {
2939
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2940
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2941
            } else {
2942
                rm = (modrm & 7) | REX_B(s);
2943
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2944
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2945
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2946
            }
2947
            break;
2948
        case 0x2d6: /* movq2dq */
2949
            gen_op_enter_mmx();
2950
            rm = (modrm & 7);
2951
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2952
                        offsetof(CPUX86State,fpregs[rm].mmx));
2953
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2954
            break;
2955
        case 0x3d6: /* movdq2q */
2956
            gen_op_enter_mmx();
2957
            rm = (modrm & 7) | REX_B(s);
2958
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
2959
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2960
            break;
2961
        case 0xd7: /* pmovmskb */
2962
        case 0x1d7:
2963
            if (mod != 3)
2964
                goto illegal_op;
2965
            if (b1) {
2966
                rm = (modrm & 7) | REX_B(s);
2967
                gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
2968
            } else {
2969
                rm = (modrm & 7);
2970
                gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
2971
            }
2972
            reg = ((modrm >> 3) & 7) | rex_r;
2973
            gen_op_mov_reg_T0[OT_LONG][reg]();
2974
            break;
2975
        default:
2976
            goto illegal_op;
2977
        }
2978
    } else {
2979
        /* generic MMX or SSE operation */
2980
        switch(b) {
2981
        case 0xf7:
2982
            /* maskmov : we must prepare A0 */
2983
            if (mod != 3) 
2984
                goto illegal_op;
2985
#ifdef TARGET_X86_64
2986
            if (s->aflag == 2) {
2987
                gen_op_movq_A0_reg[R_EDI]();
2988
            } else 
2989
#endif
2990
            {
2991
                gen_op_movl_A0_reg[R_EDI]();
2992
                if (s->aflag == 0)
2993
                    gen_op_andl_A0_ffff();
2994
            }
2995
            gen_add_A0_ds_seg(s);
2996
            break;
2997
        case 0x70: /* pshufx insn */
2998
        case 0xc6: /* pshufx insn */
2999
        case 0xc2: /* compare insns */
3000
            s->rip_offset = 1;
3001
            break;
3002
        default:
3003
            break;
3004
        }
3005
        if (is_xmm) {
3006
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3007
            if (mod != 3) {
3008
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3009
                op2_offset = offsetof(CPUX86State,xmm_t0);
3010
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3011
                                b == 0xc2)) {
3012
                    /* specific case for SSE single instructions */
3013
                    if (b1 == 2) {
3014
                        /* 32 bit access */
3015
                        gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
3016
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3017
                    } else {
3018
                        /* 64 bit access */
3019
                        gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3020
                    }
3021
                } else {
3022
                    gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
3023
                }
3024
            } else {
3025
                rm = (modrm & 7) | REX_B(s);
3026
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3027
            }
3028
        } else {
3029
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3030
            if (mod != 3) {
3031
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3032
                op2_offset = offsetof(CPUX86State,mmx_t0);
3033
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
3034
            } else {
3035
                rm = (modrm & 7);
3036
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3037
            }
3038
        }
3039
        switch(b) {
3040
        case 0x70: /* pshufx insn */
3041
        case 0xc6: /* pshufx insn */
3042
            val = ldub_code(s->pc++);
3043
            sse_op3 = (GenOpFunc3 *)sse_op2;
3044
            sse_op3(op1_offset, op2_offset, val);
3045
            break;
3046
        case 0xc2:
3047
            /* compare insns */
3048
            val = ldub_code(s->pc++);
3049
            if (val >= 8)
3050
                goto illegal_op;
3051
            sse_op2 = sse_op_table4[val][b1];
3052
            sse_op2(op1_offset, op2_offset);
3053
            break;
3054
        default:
3055
            sse_op2(op1_offset, op2_offset);
3056
            break;
3057
        }
3058
        if (b == 0x2e || b == 0x2f) {
3059
            s->cc_op = CC_OP_EFLAGS;
3060
        }
3061
    }
3062
}
3063

    
3064

    
3065
/* convert one instruction. s->is_jmp is set if the translation must
3066
   be stopped. Return the next pc value */
3067
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3068
{
3069
    int b, prefixes, aflag, dflag;
3070
    int shift, ot;
3071
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3072
    target_ulong next_eip, tval;
3073
    int rex_w, rex_r;
3074

    
3075
    s->pc = pc_start;
3076
    prefixes = 0;
3077
    aflag = s->code32;
3078
    dflag = s->code32;
3079
    s->override = -1;
3080
    rex_w = -1;
3081
    rex_r = 0;
3082
#ifdef TARGET_X86_64
3083
    s->rex_x = 0;
3084
    s->rex_b = 0;
3085
    x86_64_hregs = 0; 
3086
#endif
3087
    s->rip_offset = 0; /* for relative ip address */
3088
 next_byte:
3089
    b = ldub_code(s->pc);
3090
    s->pc++;
3091
    /* check prefixes */
3092
#ifdef TARGET_X86_64
3093
    if (CODE64(s)) {
3094
        switch (b) {
3095
        case 0xf3:
3096
            prefixes |= PREFIX_REPZ;
3097
            goto next_byte;
3098
        case 0xf2:
3099
            prefixes |= PREFIX_REPNZ;
3100
            goto next_byte;
3101
        case 0xf0:
3102
            prefixes |= PREFIX_LOCK;
3103
            goto next_byte;
3104
        case 0x2e:
3105
            s->override = R_CS;
3106
            goto next_byte;
3107
        case 0x36:
3108
            s->override = R_SS;
3109
            goto next_byte;
3110
        case 0x3e:
3111
            s->override = R_DS;
3112
            goto next_byte;
3113
        case 0x26:
3114
            s->override = R_ES;
3115
            goto next_byte;
3116
        case 0x64:
3117
            s->override = R_FS;
3118
            goto next_byte;
3119
        case 0x65:
3120
            s->override = R_GS;
3121
            goto next_byte;
3122
        case 0x66:
3123
            prefixes |= PREFIX_DATA;
3124
            goto next_byte;
3125
        case 0x67:
3126
            prefixes |= PREFIX_ADR;
3127
            goto next_byte;
3128
        case 0x40 ... 0x4f:
3129
            /* REX prefix */
3130
            rex_w = (b >> 3) & 1;
3131
            rex_r = (b & 0x4) << 1;
3132
            s->rex_x = (b & 0x2) << 2;
3133
            REX_B(s) = (b & 0x1) << 3;
3134
            x86_64_hregs = 1; /* select uniform byte register addressing */
3135
            goto next_byte;
3136
        }
3137
        if (rex_w == 1) {
3138
            /* 0x66 is ignored if rex.w is set */
3139
            dflag = 2;
3140
        } else {
3141
            if (prefixes & PREFIX_DATA)
3142
                dflag ^= 1;
3143
        }
3144
        if (!(prefixes & PREFIX_ADR))
3145
            aflag = 2;
3146
    } else 
3147
#endif
3148
    {
3149
        switch (b) {
3150
        case 0xf3:
3151
            prefixes |= PREFIX_REPZ;
3152
            goto next_byte;
3153
        case 0xf2:
3154
            prefixes |= PREFIX_REPNZ;
3155
            goto next_byte;
3156
        case 0xf0:
3157
            prefixes |= PREFIX_LOCK;
3158
            goto next_byte;
3159
        case 0x2e:
3160
            s->override = R_CS;
3161
            goto next_byte;
3162
        case 0x36:
3163
            s->override = R_SS;
3164
            goto next_byte;
3165
        case 0x3e:
3166
            s->override = R_DS;
3167
            goto next_byte;
3168
        case 0x26:
3169
            s->override = R_ES;
3170
            goto next_byte;
3171
        case 0x64:
3172
            s->override = R_FS;
3173
            goto next_byte;
3174
        case 0x65:
3175
            s->override = R_GS;
3176
            goto next_byte;
3177
        case 0x66:
3178
            prefixes |= PREFIX_DATA;
3179
            goto next_byte;
3180
        case 0x67:
3181
            prefixes |= PREFIX_ADR;
3182
            goto next_byte;
3183
        }
3184
        if (prefixes & PREFIX_DATA)
3185
            dflag ^= 1;
3186
        if (prefixes & PREFIX_ADR)
3187
            aflag ^= 1;
3188
    }
3189

    
3190
    s->prefix = prefixes;
3191
    s->aflag = aflag;
3192
    s->dflag = dflag;
3193

    
3194
    /* lock generation */
3195
    if (prefixes & PREFIX_LOCK)
3196
        gen_op_lock();
3197

    
3198
    /* now check op code */
3199
 reswitch:
3200
    switch(b) {
3201
    case 0x0f:
3202
        /**************************/
3203
        /* extended op code */
3204
        b = ldub_code(s->pc++) | 0x100;
3205
        goto reswitch;
3206
        
3207
        /**************************/
3208
        /* arith & logic */
3209
    case 0x00 ... 0x05:
3210
    case 0x08 ... 0x0d:
3211
    case 0x10 ... 0x15:
3212
    case 0x18 ... 0x1d:
3213
    case 0x20 ... 0x25:
3214
    case 0x28 ... 0x2d:
3215
    case 0x30 ... 0x35:
3216
    case 0x38 ... 0x3d:
3217
        {
3218
            int op, f, val;
3219
            op = (b >> 3) & 7;
3220
            f = (b >> 1) & 3;
3221

    
3222
            if ((b & 1) == 0)
3223
                ot = OT_BYTE;
3224
            else
3225
                ot = dflag + OT_WORD;
3226
            
3227
            switch(f) {
3228
            case 0: /* OP Ev, Gv */
3229
                modrm = ldub_code(s->pc++);
3230
                reg = ((modrm >> 3) & 7) | rex_r;
3231
                mod = (modrm >> 6) & 3;
3232
                rm = (modrm & 7) | REX_B(s);
3233
                if (mod != 3) {
3234
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3235
                    opreg = OR_TMP0;
3236
                } else if (op == OP_XORL && rm == reg) {
3237
                xor_zero:
3238
                    /* xor reg, reg optimisation */
3239
                    gen_op_movl_T0_0();
3240
                    s->cc_op = CC_OP_LOGICB + ot;
3241
                    gen_op_mov_reg_T0[ot][reg]();
3242
                    gen_op_update1_cc();
3243
                    break;
3244
                } else {
3245
                    opreg = rm;
3246
                }
3247
                gen_op_mov_TN_reg[ot][1][reg]();
3248
                gen_op(s, op, ot, opreg);
3249
                break;
3250
            case 1: /* OP Gv, Ev */
3251
                modrm = ldub_code(s->pc++);
3252
                mod = (modrm >> 6) & 3;
3253
                reg = ((modrm >> 3) & 7) | rex_r;
3254
                rm = (modrm & 7) | REX_B(s);
3255
                if (mod != 3) {
3256
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3257
                    gen_op_ld_T1_A0[ot + s->mem_index]();
3258
                } else if (op == OP_XORL && rm == reg) {
3259
                    goto xor_zero;
3260
                } else {
3261
                    gen_op_mov_TN_reg[ot][1][rm]();
3262
                }
3263
                gen_op(s, op, ot, reg);
3264
                break;
3265
            case 2: /* OP A, Iv */
3266
                val = insn_get(s, ot);
3267
                gen_op_movl_T1_im(val);
3268
                gen_op(s, op, ot, OR_EAX);
3269
                break;
3270
            }
3271
        }
3272
        break;
3273

    
3274
    case 0x80: /* GRP1 */
3275
    case 0x81:
3276
    case 0x82:
3277
    case 0x83:
3278
        {
3279
            int val;
3280

    
3281
            if ((b & 1) == 0)
3282
                ot = OT_BYTE;
3283
            else
3284
                ot = dflag + OT_WORD;
3285
            
3286
            modrm = ldub_code(s->pc++);
3287
            mod = (modrm >> 6) & 3;
3288
            rm = (modrm & 7) | REX_B(s);
3289
            op = (modrm >> 3) & 7;
3290
            
3291
            if (mod != 3) {
3292
                if (b == 0x83)
3293
                    s->rip_offset = 1;
3294
                else
3295
                    s->rip_offset = insn_const_size(ot);
3296
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3297
                opreg = OR_TMP0;
3298
            } else {
3299
                opreg = rm;
3300
            }
3301

    
3302
            switch(b) {
3303
            default:
3304
            case 0x80:
3305
            case 0x81:
3306
            case 0x82:
3307
                val = insn_get(s, ot);
3308
                break;
3309
            case 0x83:
3310
                val = (int8_t)insn_get(s, OT_BYTE);
3311
                break;
3312
            }
3313
            gen_op_movl_T1_im(val);
3314
            gen_op(s, op, ot, opreg);
3315
        }
3316
        break;
3317

    
3318
        /**************************/
3319
        /* inc, dec, and other misc arith */
3320
    case 0x40 ... 0x47: /* inc Gv */
3321
        ot = dflag ? OT_LONG : OT_WORD;
3322
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
3323
        break;
3324
    case 0x48 ... 0x4f: /* dec Gv */
3325
        ot = dflag ? OT_LONG : OT_WORD;
3326
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
3327
        break;
3328
    case 0xf6: /* GRP3 */
3329
    case 0xf7:
3330
        if ((b & 1) == 0)
3331
            ot = OT_BYTE;
3332
        else
3333
            ot = dflag + OT_WORD;
3334

    
3335
        modrm = ldub_code(s->pc++);
3336
        mod = (modrm >> 6) & 3;
3337
        rm = (modrm & 7) | REX_B(s);
3338
        op = (modrm >> 3) & 7;
3339
        if (mod != 3) {
3340
            if (op == 0)
3341
                s->rip_offset = insn_const_size(ot);
3342
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3343
            gen_op_ld_T0_A0[ot + s->mem_index]();
3344
        } else {
3345
            gen_op_mov_TN_reg[ot][0][rm]();
3346
        }
3347

    
3348
        switch(op) {
3349
        case 0: /* test */
3350
            val = insn_get(s, ot);
3351
            gen_op_movl_T1_im(val);
3352
            gen_op_testl_T0_T1_cc();
3353
            s->cc_op = CC_OP_LOGICB + ot;
3354
            break;
3355
        case 2: /* not */
3356
            gen_op_notl_T0();
3357
            if (mod != 3) {
3358
                gen_op_st_T0_A0[ot + s->mem_index]();
3359
            } else {
3360
                gen_op_mov_reg_T0[ot][rm]();
3361
            }
3362
            break;
3363
        case 3: /* neg */
3364
            gen_op_negl_T0();
3365
            if (mod != 3) {
3366
                gen_op_st_T0_A0[ot + s->mem_index]();
3367
            } else {
3368
                gen_op_mov_reg_T0[ot][rm]();
3369
            }
3370
            gen_op_update_neg_cc();
3371
            s->cc_op = CC_OP_SUBB + ot;
3372
            break;
3373
        case 4: /* mul */
3374
            switch(ot) {
3375
            case OT_BYTE:
3376
                gen_op_mulb_AL_T0();
3377
                s->cc_op = CC_OP_MULB;
3378
                break;
3379
            case OT_WORD:
3380
                gen_op_mulw_AX_T0();
3381
                s->cc_op = CC_OP_MULW;
3382
                break;
3383
            default:
3384
            case OT_LONG:
3385
                gen_op_mull_EAX_T0();
3386
                s->cc_op = CC_OP_MULL;
3387
                break;
3388
#ifdef TARGET_X86_64
3389
            case OT_QUAD:
3390
                gen_op_mulq_EAX_T0();
3391
                s->cc_op = CC_OP_MULQ;
3392
                break;
3393
#endif
3394
            }
3395
            break;
3396
        case 5: /* imul */
3397
            switch(ot) {
3398
            case OT_BYTE:
3399
                gen_op_imulb_AL_T0();
3400
                s->cc_op = CC_OP_MULB;
3401
                break;
3402
            case OT_WORD:
3403
                gen_op_imulw_AX_T0();
3404
                s->cc_op = CC_OP_MULW;
3405
                break;
3406
            default:
3407
            case OT_LONG:
3408
                gen_op_imull_EAX_T0();
3409
                s->cc_op = CC_OP_MULL;
3410
                break;
3411
#ifdef TARGET_X86_64
3412
            case OT_QUAD:
3413
                gen_op_imulq_EAX_T0();
3414
                s->cc_op = CC_OP_MULQ;
3415
                break;
3416
#endif
3417
            }
3418
            break;
3419
        case 6: /* div */
3420
            switch(ot) {
3421
            case OT_BYTE:
3422
                gen_jmp_im(pc_start - s->cs_base);
3423
                gen_op_divb_AL_T0();
3424
                break;
3425
            case OT_WORD:
3426
                gen_jmp_im(pc_start - s->cs_base);
3427
                gen_op_divw_AX_T0();
3428
                break;
3429
            default:
3430
            case OT_LONG:
3431
                gen_jmp_im(pc_start - s->cs_base);
3432
                gen_op_divl_EAX_T0();
3433
                break;
3434
#ifdef TARGET_X86_64
3435
            case OT_QUAD:
3436
                gen_jmp_im(pc_start - s->cs_base);
3437
                gen_op_divq_EAX_T0();
3438
                break;
3439
#endif
3440
            }
3441
            break;
3442
        case 7: /* idiv */
3443
            switch(ot) {
3444
            case OT_BYTE:
3445
                gen_jmp_im(pc_start - s->cs_base);
3446
                gen_op_idivb_AL_T0();
3447
                break;
3448
            case OT_WORD:
3449
                gen_jmp_im(pc_start - s->cs_base);
3450
                gen_op_idivw_AX_T0();
3451
                break;
3452
            default:
3453
            case OT_LONG:
3454
                gen_jmp_im(pc_start - s->cs_base);
3455
                gen_op_idivl_EAX_T0();
3456
                break;
3457
#ifdef TARGET_X86_64
3458
            case OT_QUAD:
3459
                gen_jmp_im(pc_start - s->cs_base);
3460
                gen_op_idivq_EAX_T0();
3461
                break;
3462
#endif
3463
            }
3464
            break;
3465
        default:
3466
            goto illegal_op;
3467
        }
3468
        break;
3469

    
3470
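    /* GRP4/GRP5: inc/dec/call/lcall/jmp/ljmp/push on Ev, selected by
       bits 5..3 of the ModRM byte; GRP4 (0xfe) only allows inc/dec. */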
    case 0xfe: /* GRP4 */
3471
    case 0xff: /* GRP5 */
3472
        if ((b & 1) == 0)
3473
            ot = OT_BYTE;
3474
        else
3475
            ot = dflag + OT_WORD;
3476

    
3477
        modrm = ldub_code(s->pc++);
3478
        mod = (modrm >> 6) & 3;
3479
        rm = (modrm & 7) | REX_B(s);
3480
        op = (modrm >> 3) & 7;
3481
        if (op >= 2 && b == 0xfe) {
3482
            goto illegal_op;
3483
        }
3484
        if (CODE64(s)) {
            if (op == 2 || op == 4) {
                /* operand size for jumps is 64 bit */
                ot = OT_QUAD;
            } else if (op == 3 || op == 5) {
                /* for far calls/jumps, the operand is 16 or 32 bit,
                   even in long mode */
                ot = dflag ? OT_LONG : OT_WORD;
            } else if (op == 6) {
                /* default push size is 64 bit */
                ot = dflag ? OT_QUAD : OT_WORD;
            }
        }
3497
        if (mod != 3) {
3498
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3499
            if (op >= 2 && op != 3 && op != 5)
3500
                gen_op_ld_T0_A0[ot + s->mem_index]();
3501
        } else {
3502
            gen_op_mov_TN_reg[ot][0][rm]();
3503
        }
3504

    
3505
        switch(op) {
3506
        case 0: /* inc Ev */
3507
            if (mod != 3)
3508
                opreg = OR_TMP0;
3509
            else
3510
                opreg = rm;
3511
            gen_inc(s, ot, opreg, 1);
3512
            break;
3513
        case 1: /* dec Ev */
3514
            if (mod != 3)
3515
                opreg = OR_TMP0;
3516
            else
3517
                opreg = rm;
3518
            gen_inc(s, ot, opreg, -1);
3519
            break;
3520
        case 2: /* call Ev */
3521
            /* XXX: optimize if memory (no 'and' is necessary) */
3522
            if (s->dflag == 0)
3523
                gen_op_andl_T0_ffff();
3524
            next_eip = s->pc - s->cs_base;
3525
            gen_movtl_T1_im(next_eip);
3526
            gen_push_T1(s);
3527
            gen_op_jmp_T0();
3528
            gen_eob(s);
3529
            break;
3530
        case 3: /* lcall Ev */
3531
            gen_op_ld_T1_A0[ot + s->mem_index]();
3532
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3533
            gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3534
        do_lcall:
3535
            if (s->pe && !s->vm86) {
3536
                if (s->cc_op != CC_OP_DYNAMIC)
3537
                    gen_op_set_cc_op(s->cc_op);
3538
                gen_jmp_im(pc_start - s->cs_base);
3539
                gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3540
            } else {
3541
                gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3542
            }
3543
            gen_eob(s);
3544
            break;
3545
        case 4: /* jmp Ev */
3546
            if (s->dflag == 0)
3547
                gen_op_andl_T0_ffff();
3548
            gen_op_jmp_T0();
3549
            gen_eob(s);
3550
            break;
3551
        case 5: /* ljmp Ev */
3552
            gen_op_ld_T1_A0[ot + s->mem_index]();
3553
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3554
            gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3555
        do_ljmp:
3556
            if (s->pe && !s->vm86) {
3557
                if (s->cc_op != CC_OP_DYNAMIC)
3558
                    gen_op_set_cc_op(s->cc_op);
3559
                gen_jmp_im(pc_start - s->cs_base);
3560
                gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3561
            } else {
3562
                gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3563
                gen_op_movl_T0_T1();
3564
                gen_op_jmp_T0();
3565
            }
3566
            gen_eob(s);
3567
            break;
3568
        case 6: /* push Ev */
3569
            gen_push_T0(s);
3570
            break;
3571
        default:
3572
            goto illegal_op;
3573
        }
3574
        break;
3575

    
3576
    case 0x84: /* test Ev, Gv */
3577
    case 0x85: 
3578
        if ((b & 1) == 0)
3579
            ot = OT_BYTE;
3580
        else
3581
            ot = dflag + OT_WORD;
3582

    
3583
        modrm = ldub_code(s->pc++);
3584
        mod = (modrm >> 6) & 3;
3585
        rm = (modrm & 7) | REX_B(s);
3586
        reg = ((modrm >> 3) & 7) | rex_r;
3587
        
3588
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3589
        gen_op_mov_TN_reg[ot][1][reg]();
3590
        gen_op_testl_T0_T1_cc();
3591
        s->cc_op = CC_OP_LOGICB + ot;
3592
        break;
3593
        
3594
    case 0xa8: /* test eAX, Iv */
3595
    case 0xa9:
3596
        if ((b & 1) == 0)
3597
            ot = OT_BYTE;
3598
        else
3599
            ot = dflag + OT_WORD;
3600
        val = insn_get(s, ot);
3601

    
3602
        gen_op_mov_TN_reg[ot][0][OR_EAX]();
3603
        gen_op_movl_T1_im(val);
3604
        gen_op_testl_T0_T1_cc();
3605
        s->cc_op = CC_OP_LOGICB + ot;
3606
        break;
3607
        
3608
    case 0x98: /* CWDE/CBW */
3609
#ifdef TARGET_X86_64
3610
        if (dflag == 2) {
3611
            gen_op_movslq_RAX_EAX();
3612
        } else
3613
#endif
3614
        if (dflag == 1)
3615
            gen_op_movswl_EAX_AX();
3616
        else
3617
            gen_op_movsbw_AX_AL();
3618
        break;
3619
    case 0x99: /* CDQ/CWD */
3620
#ifdef TARGET_X86_64
3621
        if (dflag == 2) {
3622
            gen_op_movsqo_RDX_RAX();
3623
        } else
3624
#endif
3625
        if (dflag == 1)
3626
            gen_op_movslq_EDX_EAX();
3627
        else
3628
            gen_op_movswl_DX_AX();
3629
        break;
3630
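    /* two and three operand imul: 0F AF multiplies Gv by Ev, while
       0x69/0x6b multiply Ev by an immediate (full size or sign
       extended 8 bit) and store the truncated product in Gv. */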
    case 0x1af: /* imul Gv, Ev */
3631
    case 0x69: /* imul Gv, Ev, I */
3632
    case 0x6b:
3633
        ot = dflag + OT_WORD;
3634
        modrm = ldub_code(s->pc++);
3635
        reg = ((modrm >> 3) & 7) | rex_r;
3636
        if (b == 0x69)
3637
            s->rip_offset = insn_const_size(ot);
3638
        else if (b == 0x6b)
3639
            s->rip_offset = 1;
3640
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3641
        if (b == 0x69) {
3642
            val = insn_get(s, ot);
3643
            gen_op_movl_T1_im(val);
3644
        } else if (b == 0x6b) {
3645
            val = (int8_t)insn_get(s, OT_BYTE);
3646
            gen_op_movl_T1_im(val);
3647
        } else {
3648
            gen_op_mov_TN_reg[ot][1][reg]();
3649
        }
3650

    
3651
#ifdef TARGET_X86_64
3652
        if (ot == OT_QUAD) {
3653
            gen_op_imulq_T0_T1();
3654
        } else
3655
#endif
3656
        if (ot == OT_LONG) {
3657
            gen_op_imull_T0_T1();
3658
        } else {
3659
            gen_op_imulw_T0_T1();
3660
        }
3661
        gen_op_mov_reg_T0[ot][reg]();
3662
        s->cc_op = CC_OP_MULB + ot;
3663
        break;
3664
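    /* xadd: exchange and add. The destination receives the sum and
       the source register receives the previous destination value;
       flags are set as for an add. */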
    case 0x1c0:
    case 0x1c1: /* xadd Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3) {
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_mov_TN_reg[ot][1][rm]();
            gen_op_addl_T0_T1();
            gen_op_mov_reg_T1[ot][reg]();
            gen_op_mov_reg_T0[ot][rm]();
        } else {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_ld_T1_A0[ot + s->mem_index]();
            gen_op_addl_T0_T1();
            gen_op_st_T0_A0[ot + s->mem_index]();
            gen_op_mov_reg_T1[ot][reg]();
        }
        gen_op_update2_cc();
        s->cc_op = CC_OP_ADDB + ot;
        break;
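    /* cmpxchg: compare the accumulator with the destination. If they
       are equal the destination is replaced by the source register,
       otherwise the accumulator is loaded from the destination; flags
       are set as for a cmp. */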
    case 0x1b0:
    case 0x1b1: /* cmpxchg Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        gen_op_mov_TN_reg[ot][1][reg]();
        if (mod == 3) {
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_TN_reg[ot][0][rm]();
            gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
            gen_op_mov_reg_T0[ot][rm]();
        } else {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0[ot + s->mem_index]();
            gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
        }
        s->cc_op = CC_OP_SUBB + ot;
        break;
    case 0x1c7: /* cmpxchg8b */
3714
        modrm = ldub_code(s->pc++);
3715
        mod = (modrm >> 6) & 3;
3716
        if (mod == 3)
3717
            goto illegal_op;
3718
        if (s->cc_op != CC_OP_DYNAMIC)
3719
            gen_op_set_cc_op(s->cc_op);
3720
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3721
        gen_op_cmpxchg8b();
3722
        s->cc_op = CC_OP_EFLAGS;
3723
        break;
3724
        
3725
        /**************************/
3726
        /* push/pop */
3727
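    /* NOTE: in 64 bit mode the stack operand size defaults to 64 bit
       and cannot be forced to 32 bit; only a 0x66 prefix can select a
       16 bit operand, hence the OT_QUAD/OT_WORD choices below. */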
    case 0x50 ... 0x57: /* push */
        gen_op_mov_TN_reg[OT_LONG][0][(b & 7) | REX_B(s)]();
        gen_push_T0(s);
        break;
    case 0x58 ... 0x5f: /* pop */
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        gen_pop_T0(s);
        /* NOTE: order is important for pop %sp */
        gen_pop_update(s);
        gen_op_mov_reg_T0[ot][(b & 7) | REX_B(s)]();
        break;
    case 0x60: /* pusha */
3743
        if (CODE64(s))
3744
            goto illegal_op;
3745
        gen_pusha(s);
3746
        break;
3747
    case 0x61: /* popa */
3748
        if (CODE64(s))
3749
            goto illegal_op;
3750
        gen_popa(s);
3751
        break;
3752
    case 0x68: /* push Iv */
3753
    case 0x6a:
3754
        if (CODE64(s)) {
3755
            ot = dflag ? OT_QUAD : OT_WORD;
3756
        } else {
3757
            ot = dflag + OT_WORD;
3758
        }
3759
        if (b == 0x68)
3760
            val = insn_get(s, ot);
3761
        else
3762
            val = (int8_t)insn_get(s, OT_BYTE);
3763
        gen_op_movl_T0_im(val);
3764
        gen_push_T0(s);
3765
        break;
3766
    case 0x8f: /* pop Ev */
3767
        if (CODE64(s)) {
3768
            ot = dflag ? OT_QUAD : OT_WORD;
3769
        } else {
3770
            ot = dflag + OT_WORD;
3771
        }
3772
        modrm = ldub_code(s->pc++);
3773
        mod = (modrm >> 6) & 3;
3774
        gen_pop_T0(s);
3775
        if (mod == 3) {
3776
            /* NOTE: order is important for pop %sp */
3777
            gen_pop_update(s);
3778
            rm = (modrm & 7) | REX_B(s);
3779
            gen_op_mov_reg_T0[ot][rm]();
3780
        } else {
3781
            /* NOTE: order is important too for MMU exceptions */
3782
            s->popl_esp_hack = 1 << ot;
3783
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3784
            s->popl_esp_hack = 0;
3785
            gen_pop_update(s);
3786
        }
3787
        break;
3788
    case 0xc8: /* enter */
3789
        {
3790
            int level;
3791
            val = lduw_code(s->pc);
3792
            s->pc += 2;
3793
            level = ldub_code(s->pc++);
3794
            gen_enter(s, val, level);
3795
        }
3796
        break;
3797
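    /* leave: copy (E/R)BP to (E/R)SP using the stack segment size,
       then pop (E/R)BP with the current operand size. */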
    case 0xc9: /* leave */
3798
        /* XXX: exception not precise (ESP is updated before potential exception) */
3799
        if (CODE64(s)) {
3800
            gen_op_mov_TN_reg[OT_QUAD][0][R_EBP]();
3801
            gen_op_mov_reg_T0[OT_QUAD][R_ESP]();
3802
        } else if (s->ss32) {
3803
            gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
3804
            gen_op_mov_reg_T0[OT_LONG][R_ESP]();
3805
        } else {
3806
            gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
3807
            gen_op_mov_reg_T0[OT_WORD][R_ESP]();
3808
        }
3809
        gen_pop_T0(s);
3810
        if (CODE64(s)) {
3811
            ot = dflag ? OT_QUAD : OT_WORD;
3812
        } else {
3813
            ot = dflag + OT_WORD;
3814
        }
3815
        gen_op_mov_reg_T0[ot][R_EBP]();
3816
        gen_pop_update(s);
3817
        break;
3818
    case 0x06: /* push es */
3819
    case 0x0e: /* push cs */
3820
    case 0x16: /* push ss */
3821
    case 0x1e: /* push ds */
3822
        if (CODE64(s))
3823
            goto illegal_op;
3824
        gen_op_movl_T0_seg(b >> 3);
3825
        gen_push_T0(s);
3826
        break;
3827
    case 0x1a0: /* push fs */
3828
    case 0x1a8: /* push gs */
3829
        gen_op_movl_T0_seg((b >> 3) & 7);
3830
        gen_push_T0(s);
3831
        break;
3832
    case 0x07: /* pop es */
3833
    case 0x17: /* pop ss */
3834
    case 0x1f: /* pop ds */
3835
        if (CODE64(s))
3836
            goto illegal_op;
3837
        reg = b >> 3;
3838
        gen_pop_T0(s);
3839
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
3840
        gen_pop_update(s);
3841
        if (reg == R_SS) {
3842
            /* if reg == SS, inhibit interrupts/trace. */
3843
            /* If several instructions disable interrupts, only the
3844
               _first_ does it */
3845
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
3846
                gen_op_set_inhibit_irq();
3847
            s->tf = 0;
3848
        }
3849
        if (s->is_jmp) {
3850
            gen_jmp_im(s->pc - s->cs_base);
3851
            gen_eob(s);
3852
        }
3853
        break;
3854
    case 0x1a1: /* pop fs */
3855
    case 0x1a9: /* pop gs */
3856
        gen_pop_T0(s);
3857
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
3858
        gen_pop_update(s);
3859
        if (s->is_jmp) {
3860
            gen_jmp_im(s->pc - s->cs_base);
3861
            gen_eob(s);
3862
        }
3863
        break;
        /**************************/
3866
        /* mov */
3867
    case 0x88:
3868
    case 0x89: /* mov Gv, Ev */
3869
        if ((b & 1) == 0)
3870
            ot = OT_BYTE;
3871
        else
3872
            ot = dflag + OT_WORD;
3873
        modrm = ldub_code(s->pc++);
3874
        reg = ((modrm >> 3) & 7) | rex_r;
3875
        
3876
        /* generate a generic store */
3877
        gen_ldst_modrm(s, modrm, ot, reg, 1);
3878
        break;
3879
    case 0xc6:
3880
    case 0xc7: /* mov Ev, Iv */
3881
        if ((b & 1) == 0)
3882
            ot = OT_BYTE;
3883
        else
3884
            ot = dflag + OT_WORD;
3885
        modrm = ldub_code(s->pc++);
3886
        mod = (modrm >> 6) & 3;
3887
        if (mod != 3) {
3888
            s->rip_offset = insn_const_size(ot);
3889
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3890
        }
3891
        val = insn_get(s, ot);
3892
        gen_op_movl_T0_im(val);
3893
        if (mod != 3)
3894
            gen_op_st_T0_A0[ot + s->mem_index]();
3895
        else
3896
            gen_op_mov_reg_T0[ot][(modrm & 7) | REX_B(s)]();
3897
        break;
3898
    case 0x8a:
3899
    case 0x8b: /* mov Ev, Gv */
3900
        if ((b & 1) == 0)
3901
            ot = OT_BYTE;
3902
        else
3903
            ot = OT_WORD + dflag;
3904
        modrm = ldub_code(s->pc++);
3905
        reg = ((modrm >> 3) & 7) | rex_r;
3906
        
3907
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3908
        gen_op_mov_reg_T0[ot][reg]();
3909
        break;
3910
    case 0x8e: /* mov seg, Gv */
3911
        modrm = ldub_code(s->pc++);
3912
        reg = (modrm >> 3) & 7;
3913
        if (reg >= 6 || reg == R_CS)
3914
            goto illegal_op;
3915
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3916
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
3917
        if (reg == R_SS) {
3918
            /* if reg == SS, inhibit interrupts/trace */
3919
            /* If several instructions disable interrupts, only the
3920
               _first_ does it */
3921
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
3922
                gen_op_set_inhibit_irq();
3923
            s->tf = 0;
3924
        }
3925
        if (s->is_jmp) {
3926
            gen_jmp_im(s->pc - s->cs_base);
3927
            gen_eob(s);
3928
        }
3929
        break;
3930
    case 0x8c: /* mov Gv, seg */
3931
        modrm = ldub_code(s->pc++);
3932
        reg = (modrm >> 3) & 7;
3933
        mod = (modrm >> 6) & 3;
3934
        if (reg >= 6)
3935
            goto illegal_op;
3936
        gen_op_movl_T0_seg(reg);
3937
        if (mod == 3)
3938
            ot = OT_WORD + dflag;
3939
        else
3940
            ot = OT_WORD;
3941
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3942
        break;
3943

    
3944
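    /* movzx/movsx: bit 0 of the opcode selects the source size (byte
       or word), bit 3 selects zero versus sign extension; the
       destination always uses the current operand size. */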
    case 0x1b6: /* movzbS Gv, Eb */
3945
    case 0x1b7: /* movzwS Gv, Eb */
3946
    case 0x1be: /* movsbS Gv, Eb */
3947
    case 0x1bf: /* movswS Gv, Eb */
3948
        {
3949
            int d_ot;
3950
            /* d_ot is the size of destination */
3951
            d_ot = dflag + OT_WORD;
3952
            /* ot is the size of source */
3953
            ot = (b & 1) + OT_BYTE;
3954
            modrm = ldub_code(s->pc++);
3955
            reg = ((modrm >> 3) & 7) | rex_r;
3956
            mod = (modrm >> 6) & 3;
3957
            rm = (modrm & 7) | REX_B(s);
3958
            
3959
            if (mod == 3) {
3960
                gen_op_mov_TN_reg[ot][0][rm]();
3961
                switch(ot | (b & 8)) {
3962
                case OT_BYTE:
3963
                    gen_op_movzbl_T0_T0();
3964
                    break;
3965
                case OT_BYTE | 8:
3966
                    gen_op_movsbl_T0_T0();
3967
                    break;
3968
                case OT_WORD:
3969
                    gen_op_movzwl_T0_T0();
3970
                    break;
3971
                default:
3972
                case OT_WORD | 8:
3973
                    gen_op_movswl_T0_T0();
3974
                    break;
3975
                }
3976
                gen_op_mov_reg_T0[d_ot][reg]();
3977
            } else {
3978
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3979
                if (b & 8) {
3980
                    gen_op_lds_T0_A0[ot + s->mem_index]();
3981
                } else {
3982
                    gen_op_ldu_T0_A0[ot + s->mem_index]();
3983
                }
3984
                gen_op_mov_reg_T0[d_ot][reg]();
3985
            }
3986
        }
3987
        break;
3988

    
3989
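    /* lea: compute the effective address of the memory operand
       without adding any segment base, so the segment override and
       the addseg optimization are disabled around gen_lea_modrm. */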
    case 0x8d: /* lea */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* we must ensure that no segment is added */
        s->override = -1;
        val = s->addseg;
        s->addseg = 0;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        s->addseg = val;
        gen_op_mov_reg_A0[ot - OT_WORD][reg]();
        break;

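    /* moffs forms of mov: the operand is an absolute offset encoded
       directly in the instruction (its width follows the address
       size) and is addressed relative to DS unless overridden. */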
    case 0xa0: /* mov EAX, Ov */
4006
    case 0xa1:
4007
    case 0xa2: /* mov Ov, EAX */
4008
    case 0xa3:
4009
        {
4010
            target_ulong offset_addr;
4011

    
4012
            if ((b & 1) == 0)
4013
                ot = OT_BYTE;
4014
            else
4015
                ot = dflag + OT_WORD;
4016
#ifdef TARGET_X86_64
4017
            if (s->aflag == 2) {
4018
                offset_addr = ldq_code(s->pc);
4019
                s->pc += 8;
4020
                if (offset_addr == (int32_t)offset_addr)
4021
                    gen_op_movq_A0_im(offset_addr);
4022
                else
4023
                    gen_op_movq_A0_im64(offset_addr >> 32, offset_addr);
4024
            } else 
4025
#endif
4026
            {
4027
                if (s->aflag) {
4028
                    offset_addr = insn_get(s, OT_LONG);
4029
                } else {
4030
                    offset_addr = insn_get(s, OT_WORD);
4031
                }
4032
                gen_op_movl_A0_im(offset_addr);
4033
            }
4034
            gen_add_A0_ds_seg(s);
4035
            if ((b & 2) == 0) {
4036
                gen_op_ld_T0_A0[ot + s->mem_index]();
4037
                gen_op_mov_reg_T0[ot][R_EAX]();
4038
            } else {
4039
                gen_op_mov_TN_reg[ot][0][R_EAX]();
4040
                gen_op_st_T0_A0[ot + s->mem_index]();
4041
            }
4042
        }
4043
        break;
4044
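    /* xlat: AL = [(E/R)BX + unsigned AL] in the DS segment unless
       overridden. */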
    case 0xd7: /* xlat */
#ifdef TARGET_X86_64
        if (s->aflag == 2) {
            gen_op_movq_A0_reg[R_EBX]();
            gen_op_addq_A0_AL();
        } else
#endif
        {
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_AL();
            if (s->aflag == 0)
                gen_op_andl_A0_ffff();
        }
        gen_add_A0_ds_seg(s);
        gen_op_ldu_T0_A0[OT_BYTE + s->mem_index]();
        gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
        break;
    case 0xb0 ... 0xb7: /* mov R, Ib */
4062
        val = insn_get(s, OT_BYTE);
4063
        gen_op_movl_T0_im(val);
4064
        gen_op_mov_reg_T0[OT_BYTE][(b & 7) | REX_B(s)]();
4065
        break;
4066
    case 0xb8 ... 0xbf: /* mov R, Iv */
4067
#ifdef TARGET_X86_64
4068
        if (dflag == 2) {
4069
            uint64_t tmp;
4070
            /* 64 bit case */
4071
            tmp = ldq_code(s->pc);
4072
            s->pc += 8;
4073
            reg = (b & 7) | REX_B(s);
4074
            gen_movtl_T0_im(tmp);
4075
            gen_op_mov_reg_T0[OT_QUAD][reg]();
4076
        } else 
4077
#endif
4078
        {
4079
            ot = dflag ? OT_LONG : OT_WORD;
4080
            val = insn_get(s, ot);
4081
            reg = (b & 7) | REX_B(s);
4082
            gen_op_movl_T0_im(val);
4083
            gen_op_mov_reg_T0[ot][reg]();
4084
        }
4085
        break;
4086

    
4087
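    /* xchg: 0x91..0x97 exchange a general register with eAX (0x90,
       nop, is handled separately), 0x86/0x87 exchange Ev and Gv. The
       memory form is locked implicitly, with or without a LOCK
       prefix. */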
    case 0x91 ... 0x97: /* xchg R, EAX */
4088
        ot = dflag + OT_WORD;
4089
        reg = (b & 7) | REX_B(s);
4090
        rm = R_EAX;
4091
        goto do_xchg_reg;
4092
    case 0x86:
4093
    case 0x87: /* xchg Ev, Gv */
4094
        if ((b & 1) == 0)
4095
            ot = OT_BYTE;
4096
        else
4097
            ot = dflag + OT_WORD;
4098
        modrm = ldub_code(s->pc++);
4099
        reg = ((modrm >> 3) & 7) | rex_r;
4100
        mod = (modrm >> 6) & 3;
4101
        if (mod == 3) {
4102
            rm = (modrm & 7) | REX_B(s);
4103
        do_xchg_reg:
4104
            gen_op_mov_TN_reg[ot][0][reg]();
4105
            gen_op_mov_TN_reg[ot][1][rm]();
4106
            gen_op_mov_reg_T0[ot][rm]();
4107
            gen_op_mov_reg_T1[ot][reg]();
4108
        } else {
4109
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4110
            gen_op_mov_TN_reg[ot][0][reg]();
4111
            /* for xchg, lock is implicit */
4112
            if (!(prefixes & PREFIX_LOCK))
4113
                gen_op_lock();
4114
            gen_op_ld_T1_A0[ot + s->mem_index]();
4115
            gen_op_st_T0_A0[ot + s->mem_index]();
4116
            if (!(prefixes & PREFIX_LOCK))
4117
                gen_op_unlock();
4118
            gen_op_mov_reg_T1[ot][reg]();
4119
        }
4120
        break;
4121
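    /* les/lds/lss/lfs/lgs: load a far pointer from memory, i.e. an
       offset of the current operand size followed by a 16 bit
       selector. The selector is loaded into the segment register
       before the offset is written back, so a faulting segment load
       leaves the destination register unchanged. */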
    case 0xc4: /* les Gv */
4122
        if (CODE64(s))
4123
            goto illegal_op;
4124
        op = R_ES;
4125
        goto do_lxx;
4126
    case 0xc5: /* lds Gv */
4127
        if (CODE64(s))
4128
            goto illegal_op;
4129
        op = R_DS;
4130
        goto do_lxx;
4131
    case 0x1b2: /* lss Gv */
4132
        op = R_SS;
4133
        goto do_lxx;
4134
    case 0x1b4: /* lfs Gv */
4135
        op = R_FS;
4136
        goto do_lxx;
4137
    case 0x1b5: /* lgs Gv */
4138
        op = R_GS;
4139
    do_lxx:
4140
        ot = dflag ? OT_LONG : OT_WORD;
4141
        modrm = ldub_code(s->pc++);
4142
        reg = ((modrm >> 3) & 7) | rex_r;
4143
        mod = (modrm >> 6) & 3;
4144
        if (mod == 3)
4145
            goto illegal_op;
4146
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4147
        gen_op_ld_T1_A0[ot + s->mem_index]();
4148
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4149
        /* load the segment first to handle exceptions properly */
4150
        gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
4151
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4152
        /* then put the data */
4153
        gen_op_mov_reg_T1[ot][reg]();
4154
        if (s->is_jmp) {
4155
            gen_jmp_im(s->pc - s->cs_base);
4156
            gen_eob(s);
4157
        }
4158
        break;
4159
        
4160
        /************************/
4161
        /* shifts */
4162
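    /* grp2 shifts and rotates: rol/ror/rcl/rcr/shl/shr/sal/sar,
       selected by bits 5..3 of the ModRM byte. 'shift' encodes where
       the count comes from: 2 = immediate byte, 1 = constant 1,
       0 = CL. */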
    case 0xc0:
4163
    case 0xc1:
4164
        /* shift Ev,Ib */
4165
        shift = 2;
4166
    grp2:
4167
        {
4168
            if ((b & 1) == 0)
4169
                ot = OT_BYTE;
4170
            else
4171
                ot = dflag + OT_WORD;
4172
            
4173
            modrm = ldub_code(s->pc++);
4174
            mod = (modrm >> 6) & 3;
4175
            op = (modrm >> 3) & 7;
4176
            
4177
            if (mod != 3) {
4178
                if (shift == 2) {
4179
                    s->rip_offset = 1;
4180
                }
4181
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4182
                opreg = OR_TMP0;
4183
            } else {
4184
                opreg = (modrm & 7) | REX_B(s);
4185
            }
4186

    
4187
            /* simpler op */
4188
            if (shift == 0) {
4189
                gen_shift(s, op, ot, opreg, OR_ECX);
4190
            } else {
4191
                if (shift == 2) {
4192
                    shift = ldub_code(s->pc++);
4193
                }
4194
                gen_shifti(s, op, ot, opreg, shift);
4195
            }
4196
        }
4197
        break;
4198
    case 0xd0:
4199
    case 0xd1:
4200
        /* shift Ev,1 */
4201
        shift = 1;
4202
        goto grp2;
4203
    case 0xd2:
4204
    case 0xd3:
4205
        /* shift Ev,cl */
4206
        shift = 0;
4207
        goto grp2;
4208

    
4209
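    /* shld/shrd: double precision shifts. The bits shifted in come
       from the Gv register; the count is an immediate byte or CL,
       masked to 5 bits (6 bits for 64 bit operands). */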
    case 0x1a4: /* shld imm */
4210
        op = 0;
4211
        shift = 1;
4212
        goto do_shiftd;
4213
    case 0x1a5: /* shld cl */
4214
        op = 0;
4215
        shift = 0;
4216
        goto do_shiftd;
4217
    case 0x1ac: /* shrd imm */
4218
        op = 1;
4219
        shift = 1;
4220
        goto do_shiftd;
4221
    case 0x1ad: /* shrd cl */
4222
        op = 1;
4223
        shift = 0;
4224
    do_shiftd:
4225
        ot = dflag + OT_WORD;
4226
        modrm = ldub_code(s->pc++);
4227
        mod = (modrm >> 6) & 3;
4228
        rm = (modrm & 7) | REX_B(s);
4229
        reg = ((modrm >> 3) & 7) | rex_r;
4230
        
4231
        if (mod != 3) {
4232
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4233
            gen_op_ld_T0_A0[ot + s->mem_index]();
4234
        } else {
4235
            gen_op_mov_TN_reg[ot][0][rm]();
4236
        }
4237
        gen_op_mov_TN_reg[ot][1][reg]();
4238
        
4239
        if (shift) {
4240
            val = ldub_code(s->pc++);
4241
            if (ot == OT_QUAD)
4242
                val &= 0x3f;
4243
            else
4244
                val &= 0x1f;
4245
            if (val) {
4246
                if (mod == 3)
4247
                    gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4248
                else
4249
                    gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4250
                if (op == 0 && ot != OT_WORD)
4251
                    s->cc_op = CC_OP_SHLB + ot;
4252
                else
4253
                    s->cc_op = CC_OP_SARB + ot;
4254
            }
4255
        } else {
4256
            if (s->cc_op != CC_OP_DYNAMIC)
4257
                gen_op_set_cc_op(s->cc_op);
4258
            if (mod == 3)
4259
                gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4260
            else
4261
                gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4262
            s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4263
        }
4264
        if (mod == 3) {
4265
            gen_op_mov_reg_T0[ot][rm]();
4266
        }
4267
        break;
4268

    
4269
        /************************/
4270
        /* floats */
4271
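    /* x87: the low 3 bits of the opcode and the reg field of the
       ModRM byte are combined into a single 6 bit index,
       op = ((b & 7) << 3) | reg, which selects the FPU operation. */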
    case 0xd8 ... 0xdf: 
4272
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4273
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4274
            /* XXX: what to do if illegal op ? */
4275
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4276
            break;
4277
        }
4278
        modrm = ldub_code(s->pc++);
4279
        mod = (modrm >> 6) & 3;
4280
        rm = modrm & 7;
4281
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4282
        if (mod != 3) {
4283
            /* memory op */
4284
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4285
            switch(op) {
4286
            case 0x00 ... 0x07: /* fxxxs */
4287
            case 0x10 ... 0x17: /* fixxxl */
4288
            case 0x20 ... 0x27: /* fxxxl */
4289
            case 0x30 ... 0x37: /* fixxx */
4290
                {
4291
                    int op1;
4292
                    op1 = op & 7;
4293

    
4294
                    switch(op >> 4) {
4295
                    case 0:
4296
                        gen_op_flds_FT0_A0();
4297
                        break;
4298
                    case 1:
4299
                        gen_op_fildl_FT0_A0();
4300
                        break;
4301
                    case 2:
4302
                        gen_op_fldl_FT0_A0();
4303
                        break;
4304
                    case 3:
4305
                    default:
4306
                        gen_op_fild_FT0_A0();
4307
                        break;
4308
                    }
4309
                    
4310
                    gen_op_fp_arith_ST0_FT0[op1]();
4311
                    if (op1 == 3) {
4312
                        /* fcomp needs pop */
4313
                        gen_op_fpop();
4314
                    }
4315
                }
4316
                break;
4317
            case 0x08: /* flds */
4318
            case 0x0a: /* fsts */
4319
            case 0x0b: /* fstps */
4320
            case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4321
            case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4322
            case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4323
                switch(op & 7) {
4324
                case 0:
4325
                    switch(op >> 4) {
4326
                    case 0:
4327
                        gen_op_flds_ST0_A0();
4328
                        break;
4329
                    case 1:
4330
                        gen_op_fildl_ST0_A0();
4331
                        break;
4332
                    case 2:
4333
                        gen_op_fldl_ST0_A0();
4334
                        break;
4335
                    case 3:
4336
                    default:
4337
                        gen_op_fild_ST0_A0();
4338
                        break;
4339
                    }
4340
                    break;
4341
                case 1:
4342
                    switch(op >> 4) {
4343
                    case 1:
4344
                        gen_op_fisttl_ST0_A0();
4345
                        break;
4346
                    case 2:
4347
                        gen_op_fisttll_ST0_A0();
4348
                        break;
4349
                    case 3:
4350
                    default:
4351
                        gen_op_fistt_ST0_A0();
4352
                    }
4353
                    gen_op_fpop();
4354
                    break;
4355
                default:
4356
                    switch(op >> 4) {
4357
                    case 0:
4358
                        gen_op_fsts_ST0_A0();
4359
                        break;
4360
                    case 1:
4361
                        gen_op_fistl_ST0_A0();
4362
                        break;
4363
                    case 2:
4364
                        gen_op_fstl_ST0_A0();
4365
                        break;
4366
                    case 3:
4367
                    default:
4368
                        gen_op_fist_ST0_A0();
4369
                        break;
4370
                    }
4371
                    if ((op & 7) == 3)
4372
                        gen_op_fpop();
4373
                    break;
4374
                }
4375
                break;
4376
            case 0x0c: /* fldenv mem */
4377
                gen_op_fldenv_A0(s->dflag);
4378
                break;
4379
            case 0x0d: /* fldcw mem */
4380
                gen_op_fldcw_A0();
4381
                break;
4382
            case 0x0e: /* fnstenv mem */
4383
                gen_op_fnstenv_A0(s->dflag);
4384
                break;
4385
            case 0x0f: /* fnstcw mem */
4386
                gen_op_fnstcw_A0();
4387
                break;
4388
            case 0x1d: /* fldt mem */
4389
                gen_op_fldt_ST0_A0();
4390
                break;
4391
            case 0x1f: /* fstpt mem */
4392
                gen_op_fstt_ST0_A0();
4393
                gen_op_fpop();
4394
                break;
4395
            case 0x2c: /* frstor mem */
4396
                gen_op_frstor_A0(s->dflag);
4397
                break;
4398
            case 0x2e: /* fnsave mem */
4399
                gen_op_fnsave_A0(s->dflag);
4400
                break;
4401
            case 0x2f: /* fnstsw mem */
4402
                gen_op_fnstsw_A0();
4403
                break;
4404
            case 0x3c: /* fbld */
4405
                gen_op_fbld_ST0_A0();
4406
                break;
4407
            case 0x3e: /* fbstp */
4408
                gen_op_fbst_ST0_A0();
4409
                gen_op_fpop();
4410
                break;
4411
            case 0x3d: /* fildll */
4412
                gen_op_fildll_ST0_A0();
4413
                break;
4414
            case 0x3f: /* fistpll */
4415
                gen_op_fistll_ST0_A0();
4416
                gen_op_fpop();
4417
                break;
4418
            default:
4419
                goto illegal_op;
4420
            }
4421
        } else {
4422
            /* register float ops */
4423
            opreg = rm;
4424

    
4425
            switch(op) {
4426
            case 0x08: /* fld sti */
4427
                gen_op_fpush();
4428
                gen_op_fmov_ST0_STN((opreg + 1) & 7);
4429
                break;
4430
            case 0x09: /* fxchg sti */
4431
            case 0x29: /* fxchg4 sti, undocumented op */
4432
            case 0x39: /* fxchg7 sti, undocumented op */
4433
                gen_op_fxchg_ST0_STN(opreg);
4434
                break;
4435
            case 0x0a: /* grp d9/2 */
4436
                switch(rm) {
4437
                case 0: /* fnop */
4438
                    /* check exceptions (FreeBSD FPU probe) */
4439
                    if (s->cc_op != CC_OP_DYNAMIC)
4440
                        gen_op_set_cc_op(s->cc_op);
4441
                    gen_jmp_im(pc_start - s->cs_base);
4442
                    gen_op_fwait();
4443
                    break;
4444
                default:
4445
                    goto illegal_op;
4446
                }
4447
                break;
4448
            case 0x0c: /* grp d9/4 */
4449
                switch(rm) {
4450
                case 0: /* fchs */
4451
                    gen_op_fchs_ST0();
4452
                    break;
4453
                case 1: /* fabs */
4454
                    gen_op_fabs_ST0();
4455
                    break;
4456
                case 4: /* ftst */
4457
                    gen_op_fldz_FT0();
4458
                    gen_op_fcom_ST0_FT0();
4459
                    break;
4460
                case 5: /* fxam */
4461
                    gen_op_fxam_ST0();
4462
                    break;
4463
                default:
4464
                    goto illegal_op;
4465
                }
4466
                break;
4467
            case 0x0d: /* grp d9/5 */
4468
                {
4469
                    switch(rm) {
4470
                    case 0:
4471
                        gen_op_fpush();
4472
                        gen_op_fld1_ST0();
4473
                        break;
4474
                    case 1:
4475
                        gen_op_fpush();
4476
                        gen_op_fldl2t_ST0();
4477
                        break;
4478
                    case 2:
4479
                        gen_op_fpush();
4480
                        gen_op_fldl2e_ST0();
4481
                        break;
4482
                    case 3:
4483
                        gen_op_fpush();
4484
                        gen_op_fldpi_ST0();
4485
                        break;
4486
                    case 4:
4487
                        gen_op_fpush();
4488
                        gen_op_fldlg2_ST0();
4489
                        break;
4490
                    case 5:
4491
                        gen_op_fpush();
4492
                        gen_op_fldln2_ST0();
4493
                        break;
4494
                    case 6:
4495
                        gen_op_fpush();
4496
                        gen_op_fldz_ST0();
4497
                        break;
4498
                    default:
4499
                        goto illegal_op;
4500
                    }
4501
                }
4502
                break;
4503
            case 0x0e: /* grp d9/6 */
4504
                switch(rm) {
4505
                case 0: /* f2xm1 */
4506
                    gen_op_f2xm1();
4507
                    break;
4508
                case 1: /* fyl2x */
4509
                    gen_op_fyl2x();
4510
                    break;
4511
                case 2: /* fptan */
4512
                    gen_op_fptan();
4513
                    break;
4514
                case 3: /* fpatan */
4515
                    gen_op_fpatan();
4516
                    break;
4517
                case 4: /* fxtract */
4518
                    gen_op_fxtract();
4519
                    break;
4520
                case 5: /* fprem1 */
4521
                    gen_op_fprem1();
4522
                    break;
4523
                case 6: /* fdecstp */
4524
                    gen_op_fdecstp();
4525
                    break;
4526
                default:
4527
                case 7: /* fincstp */
4528
                    gen_op_fincstp();
4529
                    break;
4530
                }
4531
                break;
4532
            case 0x0f: /* grp d9/7 */
4533
                switch(rm) {
4534
                case 0: /* fprem */
4535
                    gen_op_fprem();
4536
                    break;
4537
                case 1: /* fyl2xp1 */
4538
                    gen_op_fyl2xp1();
4539
                    break;
4540
                case 2: /* fsqrt */
4541
                    gen_op_fsqrt();
4542
                    break;
4543
                case 3: /* fsincos */
4544
                    gen_op_fsincos();
4545
                    break;
4546
                case 5: /* fscale */
4547
                    gen_op_fscale();
4548
                    break;
4549
                case 4: /* frndint */
4550
                    gen_op_frndint();
4551
                    break;
4552
                case 6: /* fsin */
4553
                    gen_op_fsin();
4554
                    break;
4555
                default:
4556
                case 7: /* fcos */
4557
                    gen_op_fcos();
4558
                    break;
4559
                }
4560
                break;
4561
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4562
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4563
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4564
                {
4565
                    int op1;
4566
                    
4567
                    op1 = op & 7;
4568
                    if (op >= 0x20) {
4569
                        gen_op_fp_arith_STN_ST0[op1](opreg);
4570
                        if (op >= 0x30)
4571
                            gen_op_fpop();
4572
                    } else {
4573
                        gen_op_fmov_FT0_STN(opreg);
4574
                        gen_op_fp_arith_ST0_FT0[op1]();
4575
                    }
4576
                }
4577
                break;
4578
            case 0x02: /* fcom */
4579
            case 0x22: /* fcom2, undocumented op */
4580
                gen_op_fmov_FT0_STN(opreg);
4581
                gen_op_fcom_ST0_FT0();
4582
                break;
4583
            case 0x03: /* fcomp */
4584
            case 0x23: /* fcomp3, undocumented op */
4585
            case 0x32: /* fcomp5, undocumented op */
4586
                gen_op_fmov_FT0_STN(opreg);
4587
                gen_op_fcom_ST0_FT0();
4588
                gen_op_fpop();
4589
                break;
4590
            case 0x15: /* da/5 */
4591
                switch(rm) {
4592
                case 1: /* fucompp */
4593
                    gen_op_fmov_FT0_STN(1);
4594
                    gen_op_fucom_ST0_FT0();
4595
                    gen_op_fpop();
4596
                    gen_op_fpop();
4597
                    break;
4598
                default:
4599
                    goto illegal_op;
4600
                }
4601
                break;
4602
            case 0x1c:
4603
                switch(rm) {
4604
                case 0: /* feni (287 only, just do nop here) */
4605
                    break;
4606
                case 1: /* fdisi (287 only, just do nop here) */
4607
                    break;
4608
                case 2: /* fclex */
4609
                    gen_op_fclex();
4610
                    break;
4611
                case 3: /* fninit */
4612
                    gen_op_fninit();
4613
                    break;
4614
                case 4: /* fsetpm (287 only, just do nop here) */
4615
                    break;
4616
                default:
4617
                    goto illegal_op;
4618
                }
4619
                break;
4620
            case 0x1d: /* fucomi */
4621
                if (s->cc_op != CC_OP_DYNAMIC)
4622
                    gen_op_set_cc_op(s->cc_op);
4623
                gen_op_fmov_FT0_STN(opreg);
4624
                gen_op_fucomi_ST0_FT0();
4625
                s->cc_op = CC_OP_EFLAGS;
4626
                break;
4627
            case 0x1e: /* fcomi */
4628
                if (s->cc_op != CC_OP_DYNAMIC)
4629
                    gen_op_set_cc_op(s->cc_op);
4630
                gen_op_fmov_FT0_STN(opreg);
4631
                gen_op_fcomi_ST0_FT0();
4632
                s->cc_op = CC_OP_EFLAGS;
4633
                break;
4634
            case 0x28: /* ffree sti */
4635
                gen_op_ffree_STN(opreg);
4636
                break; 
4637
            case 0x2a: /* fst sti */
4638
                gen_op_fmov_STN_ST0(opreg);
4639
                break;
4640
            case 0x2b: /* fstp sti */
4641
            case 0x0b: /* fstp1 sti, undocumented op */
4642
            case 0x3a: /* fstp8 sti, undocumented op */
4643
            case 0x3b: /* fstp9 sti, undocumented op */
4644
                gen_op_fmov_STN_ST0(opreg);
4645
                gen_op_fpop();
4646
                break;
4647
            case 0x2c: /* fucom st(i) */
4648
                gen_op_fmov_FT0_STN(opreg);
4649
                gen_op_fucom_ST0_FT0();
4650
                break;
4651
            case 0x2d: /* fucomp st(i) */
4652
                gen_op_fmov_FT0_STN(opreg);
4653
                gen_op_fucom_ST0_FT0();
4654
                gen_op_fpop();
4655
                break;
4656
            case 0x33: /* de/3 */
4657
                switch(rm) {
4658
                case 1: /* fcompp */
4659
                    gen_op_fmov_FT0_STN(1);
4660
                    gen_op_fcom_ST0_FT0();
4661
                    gen_op_fpop();
4662
                    gen_op_fpop();
4663
                    break;
4664
                default:
4665
                    goto illegal_op;
4666
                }
4667
                break;
4668
            case 0x38: /* ffreep sti, undocumented op */
4669
                gen_op_ffree_STN(opreg);
4670
                gen_op_fpop();
4671
                break;
4672
            case 0x3c: /* df/4 */
4673
                switch(rm) {
4674
                case 0:
4675
                    gen_op_fnstsw_EAX();
4676
                    break;
4677
                default:
4678
                    goto illegal_op;
4679
                }
4680
                break;
4681
            case 0x3d: /* fucomip */
4682
                if (s->cc_op != CC_OP_DYNAMIC)
4683
                    gen_op_set_cc_op(s->cc_op);
4684
                gen_op_fmov_FT0_STN(opreg);
4685
                gen_op_fucomi_ST0_FT0();
4686
                gen_op_fpop();
4687
                s->cc_op = CC_OP_EFLAGS;
4688
                break;
4689
            case 0x3e: /* fcomip */
4690
                if (s->cc_op != CC_OP_DYNAMIC)
4691
                    gen_op_set_cc_op(s->cc_op);
4692
                gen_op_fmov_FT0_STN(opreg);
4693
                gen_op_fcomi_ST0_FT0();
4694
                gen_op_fpop();
4695
                s->cc_op = CC_OP_EFLAGS;
4696
                break;
4697
            case 0x10 ... 0x13: /* fcmovxx */
4698
            case 0x18 ... 0x1b:
4699
                {
4700
                    int op1;
4701
                    const static uint8_t fcmov_cc[8] = {
4702
                        (JCC_B << 1),
4703
                        (JCC_Z << 1),
4704
                        (JCC_BE << 1),
4705
                        (JCC_P << 1),
4706
                    };
4707
                    op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
4708
                    gen_setcc(s, op1);
4709
                    gen_op_fcmov_ST0_STN_T0(opreg);
4710
                }
4711
                break;
4712
            default:
4713
                goto illegal_op;
4714
            }
4715
        }
4716
#ifdef USE_CODE_COPY
4717
        s->tb->cflags |= CF_TB_FP_USED;
4718
#endif
4719
        break;
4720
        /************************/
4721
        /* string ops */
4722

    
4723
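    /* string operations: with a REPZ/REPNZ prefix the whole repeated
       loop is generated (gen_repz_*); otherwise a single iteration is
       emitted. Only cmps and scas distinguish REPZ from REPNZ. */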
    case 0xa4: /* movsS */
4724
    case 0xa5:
4725
        if ((b & 1) == 0)
4726
            ot = OT_BYTE;
4727
        else
4728
            ot = dflag + OT_WORD;
4729

    
4730
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4731
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4732
        } else {
4733
            gen_movs(s, ot);
4734
        }
4735
        break;
4736
        
4737
    case 0xaa: /* stosS */
4738
    case 0xab:
4739
        if ((b & 1) == 0)
4740
            ot = OT_BYTE;
4741
        else
4742
            ot = dflag + OT_WORD;
4743

    
4744
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4745
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4746
        } else {
4747
            gen_stos(s, ot);
4748
        }
4749
        break;
4750
    case 0xac: /* lodsS */
4751
    case 0xad:
4752
        if ((b & 1) == 0)
4753
            ot = OT_BYTE;
4754
        else
4755
            ot = dflag + OT_WORD;
4756
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4757
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4758
        } else {
4759
            gen_lods(s, ot);
4760
        }
4761
        break;
4762
    case 0xae: /* scasS */
4763
    case 0xaf:
4764
        if ((b & 1) == 0)
4765
            ot = OT_BYTE;
4766
        else
4767
            ot = dflag + OT_WORD;
4768
        if (prefixes & PREFIX_REPNZ) {
4769
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4770
        } else if (prefixes & PREFIX_REPZ) {
4771
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4772
        } else {
4773
            gen_scas(s, ot);
4774
            s->cc_op = CC_OP_SUBB + ot;
4775
        }
4776
        break;
4777

    
4778
    case 0xa6: /* cmpsS */
4779
    case 0xa7:
4780
        if ((b & 1) == 0)
4781
            ot = OT_BYTE;
4782
        else
4783
            ot = dflag + OT_WORD;
4784
        if (prefixes & PREFIX_REPNZ) {
4785
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4786
        } else if (prefixes & PREFIX_REPZ) {
4787
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4788
        } else {
4789
            gen_cmps(s, ot);
4790
            s->cc_op = CC_OP_SUBB + ot;
4791
        }
4792
        break;
4793
    case 0x6c: /* insS */
4794
    case 0x6d:
4795
        if ((b & 1) == 0)
4796
            ot = OT_BYTE;
4797
        else
4798
            ot = dflag ? OT_LONG : OT_WORD;
4799
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
4800
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4801
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4802
        } else {
4803
            gen_ins(s, ot);
4804
        }
4805
        break;
4806
    case 0x6e: /* outsS */
4807
    case 0x6f:
4808
        if ((b & 1) == 0)
4809
            ot = OT_BYTE;
4810
        else
4811
            ot = dflag ? OT_LONG : OT_WORD;
4812
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
4813
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4814
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4815
        } else {
4816
            gen_outs(s, ot);
4817
        }
4818
        break;
4819

    
4820
        /************************/
4821
        /* port I/O */
4822
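    /* port I/O: the port number is either an immediate byte
       (0xe4..0xe7) or taken from DX (0xec..0xef). gen_check_io emits
       the privilege check (IOPL / I/O permission bitmap) that must
       precede the access. */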
    case 0xe4:
4823
    case 0xe5:
4824
        if ((b & 1) == 0)
4825
            ot = OT_BYTE;
4826
        else
4827
            ot = dflag ? OT_LONG : OT_WORD;
4828
        val = ldub_code(s->pc++);
4829
        gen_op_movl_T0_im(val);
4830
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
4831
        gen_op_in[ot]();
4832
        gen_op_mov_reg_T1[ot][R_EAX]();
4833
        break;
4834
    case 0xe6:
4835
    case 0xe7:
4836
        if ((b & 1) == 0)
4837
            ot = OT_BYTE;
4838
        else
4839
            ot = dflag ? OT_LONG : OT_WORD;
4840
        val = ldub_code(s->pc++);
4841
        gen_op_movl_T0_im(val);
4842
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
4843
        gen_op_mov_TN_reg[ot][1][R_EAX]();
4844
        gen_op_out[ot]();
4845
        break;
4846
    case 0xec:
4847
    case 0xed:
4848
        if ((b & 1) == 0)
4849
            ot = OT_BYTE;
4850
        else
4851
            ot = dflag ? OT_LONG : OT_WORD;
4852
        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4853
        gen_op_andl_T0_ffff();
4854
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
4855
        gen_op_in[ot]();
4856
        gen_op_mov_reg_T1[ot][R_EAX]();
4857
        break;
4858
    case 0xee:
4859
    case 0xef:
4860
        if ((b & 1) == 0)
4861
            ot = OT_BYTE;
4862
        else
4863
            ot = dflag ? OT_LONG : OT_WORD;
4864
        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4865
        gen_op_andl_T0_ffff();
4866
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
4867
        gen_op_mov_TN_reg[ot][1][R_EAX]();
4868
        gen_op_out[ot]();
4869
        break;
4870

    
4871
        /************************/
4872
        /* control */
4873
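    /* ret im: pop the return address, then release 'im' additional
       bytes of stack (typically callee-popped arguments). In 64 bit
       mode the default operand size of near returns is 64 bit. */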
    case 0xc2: /* ret im */
        val = ldsw_code(s->pc);
        s->pc += 2;
        gen_pop_T0(s);
        if (CODE64(s) && s->dflag)
            s->dflag = 2;
        gen_stack_update(s, val + (2 << s->dflag));
        if (s->dflag == 0)
            gen_op_andl_T0_ffff();
        gen_op_jmp_T0();
        gen_eob(s);
        break;
    case 0xc3: /* ret */
        gen_pop_T0(s);
        gen_pop_update(s);
        if (s->dflag == 0)
            gen_op_andl_T0_ffff();
        gen_op_jmp_T0();
        gen_eob(s);
        break;
    case 0xca: /* lret im */
4894
        val = ldsw_code(s->pc);
4895
        s->pc += 2;
4896
    do_lret:
4897
        if (s->pe && !s->vm86) {
4898
            if (s->cc_op != CC_OP_DYNAMIC)
4899
                gen_op_set_cc_op(s->cc_op);
4900
            gen_jmp_im(pc_start - s->cs_base);
4901
            gen_op_lret_protected(s->dflag, val);
4902
        } else {
4903
            gen_stack_A0(s);
4904
            /* pop offset */
4905
            gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
4906
            if (s->dflag == 0)
4907
                gen_op_andl_T0_ffff();
4908
            /* NOTE: keeping EIP updated is not a problem in case of
4909
               exception */
4910
            gen_op_jmp_T0();
4911
            /* pop selector */
4912
            gen_op_addl_A0_im(2 << s->dflag);
4913
            gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
4914
            gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
4915
            /* add stack offset */
4916
            gen_stack_update(s, val + (4 << s->dflag));
4917
        }
4918
        gen_eob(s);
4919
        break;
4920
    case 0xcb: /* lret */
4921
        val = 0;
4922
        goto do_lret;
4923
    case 0xcf: /* iret */
4924
        if (!s->pe) {
4925
            /* real mode */
4926
            gen_op_iret_real(s->dflag);
4927
            s->cc_op = CC_OP_EFLAGS;
4928
        } else if (s->vm86) {
4929
            if (s->iopl != 3) {
4930
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4931
            } else {
4932
                gen_op_iret_real(s->dflag);
4933
                s->cc_op = CC_OP_EFLAGS;
4934
            }
4935
        } else {
4936
            if (s->cc_op != CC_OP_DYNAMIC)
4937
                gen_op_set_cc_op(s->cc_op);
4938
            gen_jmp_im(pc_start - s->cs_base);
4939
            gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
4940
            s->cc_op = CC_OP_EFLAGS;
4941
        }
4942
        gen_eob(s);
4943
        break;
4944
    case 0xe8: /* call im */
4945
        {
4946
            if (dflag)
4947
                tval = (int32_t)insn_get(s, OT_LONG);
4948
            else
4949
                tval = (int16_t)insn_get(s, OT_WORD);
4950
            next_eip = s->pc - s->cs_base;
4951
            tval += next_eip;
4952
            if (s->dflag == 0)
4953
                tval &= 0xffff;
4954
            gen_movtl_T0_im(next_eip);
4955
            gen_push_T0(s);
4956
            gen_jmp(s, tval);
4957
        }
4958
        break;
4959
    case 0x9a: /* lcall im */
4960
        {
4961
            unsigned int selector, offset;
4962
            
4963
            if (CODE64(s))
4964
                goto illegal_op;
4965
            ot = dflag ? OT_LONG : OT_WORD;
4966
            offset = insn_get(s, ot);
4967
            selector = insn_get(s, OT_WORD);
4968
            
4969
            gen_op_movl_T0_im(selector);
4970
            gen_op_movl_T1_imu(offset);
4971
        }
4972
        goto do_lcall;
4973
    case 0xe9: /* jmp im */
4974
        if (dflag)
4975
            tval = (int32_t)insn_get(s, OT_LONG);
4976
        else
4977
            tval = (int16_t)insn_get(s, OT_WORD);
4978
        tval += s->pc - s->cs_base;
4979
        if (s->dflag == 0)
4980
            tval &= 0xffff;
4981
        gen_jmp(s, tval);
4982
        break;
4983
    case 0xea: /* ljmp im */
4984
        {
4985
            unsigned int selector, offset;
4986

    
4987
            if (CODE64(s))
4988
                goto illegal_op;
4989
            ot = dflag ? OT_LONG : OT_WORD;
4990
            offset = insn_get(s, ot);
4991
            selector = insn_get(s, OT_WORD);
4992
            
4993
            gen_op_movl_T0_im(selector);
4994
            gen_op_movl_T1_imu(offset);
4995
        }
4996
        goto do_ljmp;
4997
    case 0xeb: /* jmp Jb */
4998
        tval = (int8_t)insn_get(s, OT_BYTE);