Statistics
| Branch: | Revision:

root / target-i386 / translate.c @ a8ede8ba

History | View | Annotate | Download (160.8 kB)

1
/*
2
 *  i386 translation
3
 * 
4
 *  Copyright (c) 2003 Fabrice Bellard
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, write to the Free Software
18
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
 */
20
#include <stdarg.h>
21
#include <stdlib.h>
22
#include <stdio.h>
23
#include <string.h>
24
#include <inttypes.h>
25
#include <signal.h>
26
#include <assert.h>
27

    
28
#include "cpu.h"
29
#include "exec-all.h"
30
#include "disas.h"
31

    
32
/* XXX: move that elsewhere */
33
static uint16_t *gen_opc_ptr;
34
static uint32_t *gen_opparam_ptr;
35

    
36
#define PREFIX_REPZ   0x01
37
#define PREFIX_REPNZ  0x02
38
#define PREFIX_LOCK   0x04
39
#define PREFIX_DATA   0x08
40
#define PREFIX_ADR    0x10
41

    
42
#ifdef TARGET_X86_64
43
#define X86_64_ONLY(x) x
44
#define X86_64_DEF(x...) x
45
#define CODE64(s) ((s)->code64)
46
#define REX_X(s) ((s)->rex_x)
47
#define REX_B(s) ((s)->rex_b)
48
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
49
#if 1
50
#define BUGGY_64(x) NULL
51
#endif
52
#else
53
#define X86_64_ONLY(x) NULL
54
#define X86_64_DEF(x...)
55
#define CODE64(s) 0
56
#define REX_X(s) 0
57
#define REX_B(s) 0
58
#endif
59

    
60
#ifdef TARGET_X86_64
61
static int x86_64_hregs;
62
#endif
63

    
64
typedef struct DisasContext {
65
    /* current insn context */
66
    int override; /* -1 if no override */
67
    int prefix;
68
    int aflag, dflag;
69
    target_ulong pc; /* pc = eip + cs_base */
70
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
71
                   static state change (stop translation) */
72
    /* current block context */
73
    target_ulong cs_base; /* base of CS segment */
74
    int pe;     /* protected mode */
75
    int code32; /* 32 bit code segment */
76
#ifdef TARGET_X86_64
77
    int lma;    /* long mode active */
78
    int code64; /* 64 bit code segment */
79
    int rex_x, rex_b;
80
#endif
81
    int ss32;   /* 32 bit stack segment */
82
    int cc_op;  /* current CC operation */
83
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
84
    int f_st;   /* currently unused */
85
    int vm86;   /* vm86 mode */
86
    int cpl;
87
    int iopl;
88
    int tf;     /* TF cpu flag */
89
    int singlestep_enabled; /* "hardware" single step enabled */
90
    int jmp_opt; /* use direct block chaining for direct jumps */
91
    int mem_index; /* select memory access functions */
92
    int flags; /* all execution flags */
93
    struct TranslationBlock *tb;
94
    int popl_esp_hack; /* for correct popl with esp base handling */
95
    int rip_offset; /* only used in x86_64, but left for simplicity */
96
    int cpuid_features;
97
} DisasContext;
98

    
99
static void gen_eob(DisasContext *s);
100
static void gen_jmp(DisasContext *s, target_ulong eip);
101
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
102

    
103
/* i386 arith/logic operations */
104
enum {
105
    OP_ADDL, 
106
    OP_ORL, 
107
    OP_ADCL, 
108
    OP_SBBL,
109
    OP_ANDL, 
110
    OP_SUBL, 
111
    OP_XORL, 
112
    OP_CMPL,
113
};
114

    
115
/* i386 shift ops */
116
enum {
117
    OP_ROL, 
118
    OP_ROR, 
119
    OP_RCL, 
120
    OP_RCR, 
121
    OP_SHL, 
122
    OP_SHR, 
123
    OP_SHL1, /* undocumented */
124
    OP_SAR = 7,
125
};
126

    
127
enum {
128
#define DEF(s, n, copy_size) INDEX_op_ ## s,
129
#include "opc.h"
130
#undef DEF
131
    NB_OPS,
132
};
133

    
134
#include "gen-op.h"
135

    
136
/* operand size */
137
enum {
138
    OT_BYTE = 0,
139
    OT_WORD,
140
    OT_LONG, 
141
    OT_QUAD,
142
};
143

    
144
enum {
145
    /* I386 int registers */
146
    OR_EAX,   /* MUST be even numbered */
147
    OR_ECX,
148
    OR_EDX,
149
    OR_EBX,
150
    OR_ESP,
151
    OR_EBP,
152
    OR_ESI,
153
    OR_EDI,
154

    
155
    OR_TMP0 = 16,    /* temporary operand register */
156
    OR_TMP1,
157
    OR_A0, /* temporary register used when doing address evaluation */
158
};
159

    
160
#ifdef TARGET_X86_64
161

    
162
#define NB_OP_SIZES 4
163

    
164
#define DEF_REGS(prefix, suffix) \
165
  prefix ## EAX ## suffix,\
166
  prefix ## ECX ## suffix,\
167
  prefix ## EDX ## suffix,\
168
  prefix ## EBX ## suffix,\
169
  prefix ## ESP ## suffix,\
170
  prefix ## EBP ## suffix,\
171
  prefix ## ESI ## suffix,\
172
  prefix ## EDI ## suffix,\
173
  prefix ## R8 ## suffix,\
174
  prefix ## R9 ## suffix,\
175
  prefix ## R10 ## suffix,\
176
  prefix ## R11 ## suffix,\
177
  prefix ## R12 ## suffix,\
178
  prefix ## R13 ## suffix,\
179
  prefix ## R14 ## suffix,\
180
  prefix ## R15 ## suffix,
181

    
182
#define DEF_BREGS(prefixb, prefixh, suffix)             \
183
                                                        \
184
static void prefixb ## ESP ## suffix ## _wrapper(void)  \
185
{                                                       \
186
    if (x86_64_hregs)                                 \
187
        prefixb ## ESP ## suffix ();                    \
188
    else                                                \
189
        prefixh ## EAX ## suffix ();                    \
190
}                                                       \
191
                                                        \
192
static void prefixb ## EBP ## suffix ## _wrapper(void)  \
193
{                                                       \
194
    if (x86_64_hregs)                                 \
195
        prefixb ## EBP ## suffix ();                    \
196
    else                                                \
197
        prefixh ## ECX ## suffix ();                    \
198
}                                                       \
199
                                                        \
200
static void prefixb ## ESI ## suffix ## _wrapper(void)  \
201
{                                                       \
202
    if (x86_64_hregs)                                 \
203
        prefixb ## ESI ## suffix ();                    \
204
    else                                                \
205
        prefixh ## EDX ## suffix ();                    \
206
}                                                       \
207
                                                        \
208
static void prefixb ## EDI ## suffix ## _wrapper(void)  \
209
{                                                       \
210
    if (x86_64_hregs)                                 \
211
        prefixb ## EDI ## suffix ();                    \
212
    else                                                \
213
        prefixh ## EBX ## suffix ();                    \
214
}
215

    
216
DEF_BREGS(gen_op_movb_, gen_op_movh_, _T0)
217
DEF_BREGS(gen_op_movb_, gen_op_movh_, _T1)
218
DEF_BREGS(gen_op_movl_T0_, gen_op_movh_T0_, )
219
DEF_BREGS(gen_op_movl_T1_, gen_op_movh_T1_, )
220

    
221
#else /* !TARGET_X86_64 */
222

    
223
#define NB_OP_SIZES 3
224

    
225
#define DEF_REGS(prefix, suffix) \
226
  prefix ## EAX ## suffix,\
227
  prefix ## ECX ## suffix,\
228
  prefix ## EDX ## suffix,\
229
  prefix ## EBX ## suffix,\
230
  prefix ## ESP ## suffix,\
231
  prefix ## EBP ## suffix,\
232
  prefix ## ESI ## suffix,\
233
  prefix ## EDI ## suffix,
234

    
235
#endif /* !TARGET_X86_64 */
236

    
237
static GenOpFunc *gen_op_mov_reg_T0[NB_OP_SIZES][CPU_NB_REGS] = {
238
    [OT_BYTE] = {
239
        gen_op_movb_EAX_T0,
240
        gen_op_movb_ECX_T0,
241
        gen_op_movb_EDX_T0,
242
        gen_op_movb_EBX_T0,
243
#ifdef TARGET_X86_64
244
        gen_op_movb_ESP_T0_wrapper,
245
        gen_op_movb_EBP_T0_wrapper,
246
        gen_op_movb_ESI_T0_wrapper,
247
        gen_op_movb_EDI_T0_wrapper,
248
        gen_op_movb_R8_T0,
249
        gen_op_movb_R9_T0,
250
        gen_op_movb_R10_T0,
251
        gen_op_movb_R11_T0,
252
        gen_op_movb_R12_T0,
253
        gen_op_movb_R13_T0,
254
        gen_op_movb_R14_T0,
255
        gen_op_movb_R15_T0,
256
#else
257
        gen_op_movh_EAX_T0,
258
        gen_op_movh_ECX_T0,
259
        gen_op_movh_EDX_T0,
260
        gen_op_movh_EBX_T0,
261
#endif
262
    },
263
    [OT_WORD] = {
264
        DEF_REGS(gen_op_movw_, _T0)
265
    },
266
    [OT_LONG] = {
267
        DEF_REGS(gen_op_movl_, _T0)
268
    },
269
#ifdef TARGET_X86_64
270
    [OT_QUAD] = {
271
        DEF_REGS(gen_op_movq_, _T0)
272
    },
273
#endif
274
};
275

    
276
static GenOpFunc *gen_op_mov_reg_T1[NB_OP_SIZES][CPU_NB_REGS] = {
277
    [OT_BYTE] = {
278
        gen_op_movb_EAX_T1,
279
        gen_op_movb_ECX_T1,
280
        gen_op_movb_EDX_T1,
281
        gen_op_movb_EBX_T1,
282
#ifdef TARGET_X86_64
283
        gen_op_movb_ESP_T1_wrapper,
284
        gen_op_movb_EBP_T1_wrapper,
285
        gen_op_movb_ESI_T1_wrapper,
286
        gen_op_movb_EDI_T1_wrapper,
287
        gen_op_movb_R8_T1,
288
        gen_op_movb_R9_T1,
289
        gen_op_movb_R10_T1,
290
        gen_op_movb_R11_T1,
291
        gen_op_movb_R12_T1,
292
        gen_op_movb_R13_T1,
293
        gen_op_movb_R14_T1,
294
        gen_op_movb_R15_T1,
295
#else
296
        gen_op_movh_EAX_T1,
297
        gen_op_movh_ECX_T1,
298
        gen_op_movh_EDX_T1,
299
        gen_op_movh_EBX_T1,
300
#endif
301
    },
302
    [OT_WORD] = {
303
        DEF_REGS(gen_op_movw_, _T1)
304
    },
305
    [OT_LONG] = {
306
        DEF_REGS(gen_op_movl_, _T1)
307
    },
308
#ifdef TARGET_X86_64
309
    [OT_QUAD] = {
310
        DEF_REGS(gen_op_movq_, _T1)
311
    },
312
#endif
313
};
314

    
315
static GenOpFunc *gen_op_mov_reg_A0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
316
    [0] = {
317
        DEF_REGS(gen_op_movw_, _A0)
318
    },
319
    [1] = {
320
        DEF_REGS(gen_op_movl_, _A0)
321
    },
322
#ifdef TARGET_X86_64
323
    [2] = {
324
        DEF_REGS(gen_op_movq_, _A0)
325
    },
326
#endif
327
};
328

    
329
static GenOpFunc *gen_op_mov_TN_reg[NB_OP_SIZES][2][CPU_NB_REGS] = 
330
{
331
    [OT_BYTE] = {
332
        {
333
            gen_op_movl_T0_EAX,
334
            gen_op_movl_T0_ECX,
335
            gen_op_movl_T0_EDX,
336
            gen_op_movl_T0_EBX,
337
#ifdef TARGET_X86_64
338
            gen_op_movl_T0_ESP_wrapper,
339
            gen_op_movl_T0_EBP_wrapper,
340
            gen_op_movl_T0_ESI_wrapper,
341
            gen_op_movl_T0_EDI_wrapper,
342
            gen_op_movl_T0_R8,
343
            gen_op_movl_T0_R9,
344
            gen_op_movl_T0_R10,
345
            gen_op_movl_T0_R11,
346
            gen_op_movl_T0_R12,
347
            gen_op_movl_T0_R13,
348
            gen_op_movl_T0_R14,
349
            gen_op_movl_T0_R15,
350
#else
351
            gen_op_movh_T0_EAX,
352
            gen_op_movh_T0_ECX,
353
            gen_op_movh_T0_EDX,
354
            gen_op_movh_T0_EBX,
355
#endif
356
        },
357
        {
358
            gen_op_movl_T1_EAX,
359
            gen_op_movl_T1_ECX,
360
            gen_op_movl_T1_EDX,
361
            gen_op_movl_T1_EBX,
362
#ifdef TARGET_X86_64
363
            gen_op_movl_T1_ESP_wrapper,
364
            gen_op_movl_T1_EBP_wrapper,
365
            gen_op_movl_T1_ESI_wrapper,
366
            gen_op_movl_T1_EDI_wrapper,
367
            gen_op_movl_T1_R8,
368
            gen_op_movl_T1_R9,
369
            gen_op_movl_T1_R10,
370
            gen_op_movl_T1_R11,
371
            gen_op_movl_T1_R12,
372
            gen_op_movl_T1_R13,
373
            gen_op_movl_T1_R14,
374
            gen_op_movl_T1_R15,
375
#else
376
            gen_op_movh_T1_EAX,
377
            gen_op_movh_T1_ECX,
378
            gen_op_movh_T1_EDX,
379
            gen_op_movh_T1_EBX,
380
#endif
381
        },
382
    },
383
    [OT_WORD] = {
384
        {
385
            DEF_REGS(gen_op_movl_T0_, )
386
        },
387
        {
388
            DEF_REGS(gen_op_movl_T1_, )
389
        },
390
    },
391
    [OT_LONG] = {
392
        {
393
            DEF_REGS(gen_op_movl_T0_, )
394
        },
395
        {
396
            DEF_REGS(gen_op_movl_T1_, )
397
        },
398
    },
399
#ifdef TARGET_X86_64
400
    [OT_QUAD] = {
401
        {
402
            DEF_REGS(gen_op_movl_T0_, )
403
        },
404
        {
405
            DEF_REGS(gen_op_movl_T1_, )
406
        },
407
    },
408
#endif
409
};
410

    
411
static GenOpFunc *gen_op_movl_A0_reg[CPU_NB_REGS] = {
412
    DEF_REGS(gen_op_movl_A0_, )
413
};
414

    
415
static GenOpFunc *gen_op_addl_A0_reg_sN[4][CPU_NB_REGS] = {
416
    [0] = {
417
        DEF_REGS(gen_op_addl_A0_, )
418
    },
419
    [1] = {
420
        DEF_REGS(gen_op_addl_A0_, _s1)
421
    },
422
    [2] = {
423
        DEF_REGS(gen_op_addl_A0_, _s2)
424
    },
425
    [3] = {
426
        DEF_REGS(gen_op_addl_A0_, _s3)
427
    },
428
};
429

    
430
#ifdef TARGET_X86_64
431
static GenOpFunc *gen_op_movq_A0_reg[CPU_NB_REGS] = {
432
    DEF_REGS(gen_op_movq_A0_, )
433
};
434

    
435
static GenOpFunc *gen_op_addq_A0_reg_sN[4][CPU_NB_REGS] = {
436
    [0] = {
437
        DEF_REGS(gen_op_addq_A0_, )
438
    },
439
    [1] = {
440
        DEF_REGS(gen_op_addq_A0_, _s1)
441
    },
442
    [2] = {
443
        DEF_REGS(gen_op_addq_A0_, _s2)
444
    },
445
    [3] = {
446
        DEF_REGS(gen_op_addq_A0_, _s3)
447
    },
448
};
449
#endif
450

    
451
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
452
    [0] = {
453
        DEF_REGS(gen_op_cmovw_, _T1_T0)
454
    },
455
    [1] = {
456
        DEF_REGS(gen_op_cmovl_, _T1_T0)
457
    },
458
#ifdef TARGET_X86_64
459
    [2] = {
460
        DEF_REGS(gen_op_cmovq_, _T1_T0)
461
    },
462
#endif
463
};
464

    
465
static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
466
    NULL,
467
    gen_op_orl_T0_T1,
468
    NULL,
469
    NULL,
470
    gen_op_andl_T0_T1,
471
    NULL,
472
    gen_op_xorl_T0_T1,
473
    NULL,
474
};
475

    
476
#define DEF_ARITHC(SUFFIX)\
477
    {\
478
        gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
479
        gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
480
    },\
481
    {\
482
        gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
483
        gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
484
    },\
485
    {\
486
        gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
487
        gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
488
    },\
489
    {\
490
        X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
491
        X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
492
    },
493

    
494
static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
495
    DEF_ARITHC( )
496
};
497

    
498
static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
499
    DEF_ARITHC(_raw)
500
#ifndef CONFIG_USER_ONLY
501
    DEF_ARITHC(_kernel)
502
    DEF_ARITHC(_user)
503
#endif
504
};
505

    
506
static const int cc_op_arithb[8] = {
507
    CC_OP_ADDB,
508
    CC_OP_LOGICB,
509
    CC_OP_ADDB,
510
    CC_OP_SUBB,
511
    CC_OP_LOGICB,
512
    CC_OP_SUBB,
513
    CC_OP_LOGICB,
514
    CC_OP_SUBB,
515
};
516

    
517
#define DEF_CMPXCHG(SUFFIX)\
518
    gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
519
    gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
520
    gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
521
    X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
522

    
523
static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
524
    DEF_CMPXCHG( )
525
};
526

    
527
static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
528
    DEF_CMPXCHG(_raw)
529
#ifndef CONFIG_USER_ONLY
530
    DEF_CMPXCHG(_kernel)
531
    DEF_CMPXCHG(_user)
532
#endif
533
};
534

    
535
#define DEF_SHIFT(SUFFIX)\
536
    {\
537
        gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
538
        gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
539
        gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
540
        gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
541
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
542
        gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
543
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
544
        gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
545
    },\
546
    {\
547
        gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
548
        gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
549
        gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
550
        gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
551
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
552
        gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
553
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
554
        gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
555
    },\
556
    {\
557
        gen_op_roll ## SUFFIX ## _T0_T1_cc,\
558
        gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
559
        gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
560
        gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
561
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
562
        gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
563
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
564
        gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
565
    },\
566
    {\
567
        X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
568
        X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
569
        X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
570
        X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
571
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
572
        X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
573
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
574
        X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
575
    },
576

    
577
static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
578
    DEF_SHIFT( )
579
};
580

    
581
static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
582
    DEF_SHIFT(_raw)
583
#ifndef CONFIG_USER_ONLY
584
    DEF_SHIFT(_kernel)
585
    DEF_SHIFT(_user)
586
#endif
587
};
588

    
589
#define DEF_SHIFTD(SUFFIX, op)\
590
    {\
591
        NULL,\
592
        NULL,\
593
    },\
594
    {\
595
        gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
596
        gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
597
    },\
598
    {\
599
        gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
600
        gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
601
    },\
602
    {\
603
    },
604

    
605
static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
606
    DEF_SHIFTD(, im)
607
};
608

    
609
static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
610
    DEF_SHIFTD(, ECX)
611
};
612

    
613
static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
614
    DEF_SHIFTD(_raw, im)
615
#ifndef CONFIG_USER_ONLY
616
    DEF_SHIFTD(_kernel, im)
617
    DEF_SHIFTD(_user, im)
618
#endif
619
};
620

    
621
static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
622
    DEF_SHIFTD(_raw, ECX)
623
#ifndef CONFIG_USER_ONLY
624
    DEF_SHIFTD(_kernel, ECX)
625
    DEF_SHIFTD(_user, ECX)
626
#endif
627
};
628

    
629
static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
630
    [0] = {
631
        gen_op_btw_T0_T1_cc,
632
        gen_op_btsw_T0_T1_cc,
633
        gen_op_btrw_T0_T1_cc,
634
        gen_op_btcw_T0_T1_cc,
635
    },
636
    [1] = {
637
        gen_op_btl_T0_T1_cc,
638
        gen_op_btsl_T0_T1_cc,
639
        gen_op_btrl_T0_T1_cc,
640
        gen_op_btcl_T0_T1_cc,
641
    },
642
#ifdef TARGET_X86_64
643
    [2] = {
644
        gen_op_btq_T0_T1_cc,
645
        gen_op_btsq_T0_T1_cc,
646
        gen_op_btrq_T0_T1_cc,
647
        gen_op_btcq_T0_T1_cc,
648
    },
649
#endif
650
};
651

    
652
static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
653
    gen_op_add_bitw_A0_T1,
654
    gen_op_add_bitl_A0_T1,
655
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
656
};
657

    
658
static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
659
    [0] = {
660
        gen_op_bsfw_T0_cc,
661
        gen_op_bsrw_T0_cc,
662
    },
663
    [1] = {
664
        gen_op_bsfl_T0_cc,
665
        gen_op_bsrl_T0_cc,
666
    },
667
#ifdef TARGET_X86_64
668
    [2] = {
669
        gen_op_bsfq_T0_cc,
670
        gen_op_bsrq_T0_cc,
671
    },
672
#endif
673
};
674

    
675
static GenOpFunc *gen_op_lds_T0_A0[3 * 4] = {
676
    gen_op_ldsb_raw_T0_A0,
677
    gen_op_ldsw_raw_T0_A0,
678
    X86_64_ONLY(gen_op_ldsl_raw_T0_A0),
679
    NULL,
680
#ifndef CONFIG_USER_ONLY
681
    gen_op_ldsb_kernel_T0_A0,
682
    gen_op_ldsw_kernel_T0_A0,
683
    X86_64_ONLY(gen_op_ldsl_kernel_T0_A0),
684
    NULL,
685

    
686
    gen_op_ldsb_user_T0_A0,
687
    gen_op_ldsw_user_T0_A0,
688
    X86_64_ONLY(gen_op_ldsl_user_T0_A0),
689
    NULL,
690
#endif
691
};
692

    
693
static GenOpFunc *gen_op_ldu_T0_A0[3 * 4] = {
694
    gen_op_ldub_raw_T0_A0,
695
    gen_op_lduw_raw_T0_A0,
696
    NULL,
697
    NULL,
698

    
699
#ifndef CONFIG_USER_ONLY
700
    gen_op_ldub_kernel_T0_A0,
701
    gen_op_lduw_kernel_T0_A0,
702
    NULL,
703
    NULL,
704

    
705
    gen_op_ldub_user_T0_A0,
706
    gen_op_lduw_user_T0_A0,
707
    NULL,
708
    NULL,
709
#endif
710
};
711

    
712
/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
713
static GenOpFunc *gen_op_ld_T0_A0[3 * 4] = {
714
    gen_op_ldub_raw_T0_A0,
715
    gen_op_lduw_raw_T0_A0,
716
    gen_op_ldl_raw_T0_A0,
717
    X86_64_ONLY(gen_op_ldq_raw_T0_A0),
718

    
719
#ifndef CONFIG_USER_ONLY
720
    gen_op_ldub_kernel_T0_A0,
721
    gen_op_lduw_kernel_T0_A0,
722
    gen_op_ldl_kernel_T0_A0,
723
    X86_64_ONLY(gen_op_ldq_kernel_T0_A0),
724

    
725
    gen_op_ldub_user_T0_A0,
726
    gen_op_lduw_user_T0_A0,
727
    gen_op_ldl_user_T0_A0,
728
    X86_64_ONLY(gen_op_ldq_user_T0_A0),
729
#endif
730
};
731

    
732
static GenOpFunc *gen_op_ld_T1_A0[3 * 4] = {
733
    gen_op_ldub_raw_T1_A0,
734
    gen_op_lduw_raw_T1_A0,
735
    gen_op_ldl_raw_T1_A0,
736
    X86_64_ONLY(gen_op_ldq_raw_T1_A0),
737

    
738
#ifndef CONFIG_USER_ONLY
739
    gen_op_ldub_kernel_T1_A0,
740
    gen_op_lduw_kernel_T1_A0,
741
    gen_op_ldl_kernel_T1_A0,
742
    X86_64_ONLY(gen_op_ldq_kernel_T1_A0),
743

    
744
    gen_op_ldub_user_T1_A0,
745
    gen_op_lduw_user_T1_A0,
746
    gen_op_ldl_user_T1_A0,
747
    X86_64_ONLY(gen_op_ldq_user_T1_A0),
748
#endif
749
};
750

    
751
static GenOpFunc *gen_op_st_T0_A0[3 * 4] = {
752
    gen_op_stb_raw_T0_A0,
753
    gen_op_stw_raw_T0_A0,
754
    gen_op_stl_raw_T0_A0,
755
    X86_64_ONLY(gen_op_stq_raw_T0_A0),
756

    
757
#ifndef CONFIG_USER_ONLY
758
    gen_op_stb_kernel_T0_A0,
759
    gen_op_stw_kernel_T0_A0,
760
    gen_op_stl_kernel_T0_A0,
761
    X86_64_ONLY(gen_op_stq_kernel_T0_A0),
762

    
763
    gen_op_stb_user_T0_A0,
764
    gen_op_stw_user_T0_A0,
765
    gen_op_stl_user_T0_A0,
766
    X86_64_ONLY(gen_op_stq_user_T0_A0),
767
#endif
768
};
769

    
770
static GenOpFunc *gen_op_st_T1_A0[3 * 4] = {
771
    NULL,
772
    gen_op_stw_raw_T1_A0,
773
    gen_op_stl_raw_T1_A0,
774
    X86_64_ONLY(gen_op_stq_raw_T1_A0),
775

    
776
#ifndef CONFIG_USER_ONLY
777
    NULL,
778
    gen_op_stw_kernel_T1_A0,
779
    gen_op_stl_kernel_T1_A0,
780
    X86_64_ONLY(gen_op_stq_kernel_T1_A0),
781

    
782
    NULL,
783
    gen_op_stw_user_T1_A0,
784
    gen_op_stl_user_T1_A0,
785
    X86_64_ONLY(gen_op_stq_user_T1_A0),
786
#endif
787
};
788

    
789
static inline void gen_jmp_im(target_ulong pc)
790
{
791
#ifdef TARGET_X86_64
792
    if (pc == (uint32_t)pc) {
793
        gen_op_movl_eip_im(pc);
794
    } else if (pc == (int32_t)pc) {
795
        gen_op_movq_eip_im(pc);
796
    } else {
797
        gen_op_movq_eip_im64(pc >> 32, pc);
798
    }
799
#else
800
    gen_op_movl_eip_im(pc);
801
#endif
802
}
803

    
804
static inline void gen_string_movl_A0_ESI(DisasContext *s)
805
{
806
    int override;
807

    
808
    override = s->override;
809
#ifdef TARGET_X86_64
810
    if (s->aflag == 2) {
811
        if (override >= 0) {
812
            gen_op_movq_A0_seg(offsetof(CPUX86State,segs[override].base));
813
            gen_op_addq_A0_reg_sN[0][R_ESI]();
814
        } else {
815
            gen_op_movq_A0_reg[R_ESI]();
816
        }
817
    } else
818
#endif
819
    if (s->aflag) {
820
        /* 32 bit address */
821
        if (s->addseg && override < 0)
822
            override = R_DS;
823
        if (override >= 0) {
824
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
825
            gen_op_addl_A0_reg_sN[0][R_ESI]();
826
        } else {
827
            gen_op_movl_A0_reg[R_ESI]();
828
        }
829
    } else {
830
        /* 16 address, always override */
831
        if (override < 0)
832
            override = R_DS;
833
        gen_op_movl_A0_reg[R_ESI]();
834
        gen_op_andl_A0_ffff();
835
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
836
    }
837
}
838

    
839
static inline void gen_string_movl_A0_EDI(DisasContext *s)
840
{
841
#ifdef TARGET_X86_64
842
    if (s->aflag == 2) {
843
        gen_op_movq_A0_reg[R_EDI]();
844
    } else
845
#endif
846
    if (s->aflag) {
847
        if (s->addseg) {
848
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
849
            gen_op_addl_A0_reg_sN[0][R_EDI]();
850
        } else {
851
            gen_op_movl_A0_reg[R_EDI]();
852
        }
853
    } else {
854
        gen_op_movl_A0_reg[R_EDI]();
855
        gen_op_andl_A0_ffff();
856
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
857
    }
858
}
859

    
860
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
861
    gen_op_movl_T0_Dshiftb,
862
    gen_op_movl_T0_Dshiftw,
863
    gen_op_movl_T0_Dshiftl,
864
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
865
};
866

    
867
static GenOpFunc1 *gen_op_jnz_ecx[3] = {
868
    gen_op_jnz_ecxw,
869
    gen_op_jnz_ecxl,
870
    X86_64_ONLY(gen_op_jnz_ecxq),
871
};
872
    
873
static GenOpFunc1 *gen_op_jz_ecx[3] = {
874
    gen_op_jz_ecxw,
875
    gen_op_jz_ecxl,
876
    X86_64_ONLY(gen_op_jz_ecxq),
877
};
878

    
879
static GenOpFunc *gen_op_dec_ECX[3] = {
880
    gen_op_decw_ECX,
881
    gen_op_decl_ECX,
882
    X86_64_ONLY(gen_op_decq_ECX),
883
};
884

    
885
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
886
    {
887
        gen_op_jnz_subb,
888
        gen_op_jnz_subw,
889
        gen_op_jnz_subl,
890
        X86_64_ONLY(gen_op_jnz_subq),
891
    },
892
    {
893
        gen_op_jz_subb,
894
        gen_op_jz_subw,
895
        gen_op_jz_subl,
896
        X86_64_ONLY(gen_op_jz_subq),
897
    },
898
};
899

    
900
static GenOpFunc *gen_op_in_DX_T0[3] = {
901
    gen_op_inb_DX_T0,
902
    gen_op_inw_DX_T0,
903
    gen_op_inl_DX_T0,
904
};
905

    
906
static GenOpFunc *gen_op_out_DX_T0[3] = {
907
    gen_op_outb_DX_T0,
908
    gen_op_outw_DX_T0,
909
    gen_op_outl_DX_T0,
910
};
911

    
912
static GenOpFunc *gen_op_in[3] = {
913
    gen_op_inb_T0_T1,
914
    gen_op_inw_T0_T1,
915
    gen_op_inl_T0_T1,
916
};
917

    
918
static GenOpFunc *gen_op_out[3] = {
919
    gen_op_outb_T0_T1,
920
    gen_op_outw_T0_T1,
921
    gen_op_outl_T0_T1,
922
};
923

    
924
static GenOpFunc *gen_check_io_T0[3] = {
925
    gen_op_check_iob_T0,
926
    gen_op_check_iow_T0,
927
    gen_op_check_iol_T0,
928
};
929

    
930
static GenOpFunc *gen_check_io_DX[3] = {
931
    gen_op_check_iob_DX,
932
    gen_op_check_iow_DX,
933
    gen_op_check_iol_DX,
934
};
935

    
936
static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
937
{
938
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
939
        if (s->cc_op != CC_OP_DYNAMIC)
940
            gen_op_set_cc_op(s->cc_op);
941
        gen_jmp_im(cur_eip);
942
        if (use_dx)
943
            gen_check_io_DX[ot]();
944
        else
945
            gen_check_io_T0[ot]();
946
    }
947
}
948

    
949
static inline void gen_movs(DisasContext *s, int ot)
950
{
951
    gen_string_movl_A0_ESI(s);
952
    gen_op_ld_T0_A0[ot + s->mem_index]();
953
    gen_string_movl_A0_EDI(s);
954
    gen_op_st_T0_A0[ot + s->mem_index]();
955
    gen_op_movl_T0_Dshift[ot]();
956
#ifdef TARGET_X86_64
957
    if (s->aflag == 2) {
958
        gen_op_addq_ESI_T0();
959
        gen_op_addq_EDI_T0();
960
    } else 
961
#endif
962
    if (s->aflag) {
963
        gen_op_addl_ESI_T0();
964
        gen_op_addl_EDI_T0();
965
    } else {
966
        gen_op_addw_ESI_T0();
967
        gen_op_addw_EDI_T0();
968
    }
969
}
970

    
971
static inline void gen_update_cc_op(DisasContext *s)
972
{
973
    if (s->cc_op != CC_OP_DYNAMIC) {
974
        gen_op_set_cc_op(s->cc_op);
975
        s->cc_op = CC_OP_DYNAMIC;
976
    }
977
}
978

    
979
/* XXX: does not work with gdbstub "ice" single step - not a
980
   serious problem */
981
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
982
{
983
    int l1, l2;
984

    
985
    l1 = gen_new_label();
986
    l2 = gen_new_label();
987
    gen_op_jnz_ecx[s->aflag](l1);
988
    gen_set_label(l2);
989
    gen_jmp_tb(s, next_eip, 1);
990
    gen_set_label(l1);
991
    return l2;
992
}
993

    
994
static inline void gen_stos(DisasContext *s, int ot)
995
{
996
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
997
    gen_string_movl_A0_EDI(s);
998
    gen_op_st_T0_A0[ot + s->mem_index]();
999
    gen_op_movl_T0_Dshift[ot]();
1000
#ifdef TARGET_X86_64
1001
    if (s->aflag == 2) {
1002
        gen_op_addq_EDI_T0();
1003
    } else 
1004
#endif
1005
    if (s->aflag) {
1006
        gen_op_addl_EDI_T0();
1007
    } else {
1008
        gen_op_addw_EDI_T0();
1009
    }
1010
}
1011

    
1012
static inline void gen_lods(DisasContext *s, int ot)
1013
{
1014
    gen_string_movl_A0_ESI(s);
1015
    gen_op_ld_T0_A0[ot + s->mem_index]();
1016
    gen_op_mov_reg_T0[ot][R_EAX]();
1017
    gen_op_movl_T0_Dshift[ot]();
1018
#ifdef TARGET_X86_64
1019
    if (s->aflag == 2) {
1020
        gen_op_addq_ESI_T0();
1021
    } else 
1022
#endif
1023
    if (s->aflag) {
1024
        gen_op_addl_ESI_T0();
1025
    } else {
1026
        gen_op_addw_ESI_T0();
1027
    }
1028
}
1029

    
1030
static inline void gen_scas(DisasContext *s, int ot)
1031
{
1032
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
1033
    gen_string_movl_A0_EDI(s);
1034
    gen_op_ld_T1_A0[ot + s->mem_index]();
1035
    gen_op_cmpl_T0_T1_cc();
1036
    gen_op_movl_T0_Dshift[ot]();
1037
#ifdef TARGET_X86_64
1038
    if (s->aflag == 2) {
1039
        gen_op_addq_EDI_T0();
1040
    } else 
1041
#endif
1042
    if (s->aflag) {
1043
        gen_op_addl_EDI_T0();
1044
    } else {
1045
        gen_op_addw_EDI_T0();
1046
    }
1047
}
1048

    
1049
static inline void gen_cmps(DisasContext *s, int ot)
1050
{
1051
    gen_string_movl_A0_ESI(s);
1052
    gen_op_ld_T0_A0[ot + s->mem_index]();
1053
    gen_string_movl_A0_EDI(s);
1054
    gen_op_ld_T1_A0[ot + s->mem_index]();
1055
    gen_op_cmpl_T0_T1_cc();
1056
    gen_op_movl_T0_Dshift[ot]();
1057
#ifdef TARGET_X86_64
1058
    if (s->aflag == 2) {
1059
        gen_op_addq_ESI_T0();
1060
        gen_op_addq_EDI_T0();
1061
    } else 
1062
#endif
1063
    if (s->aflag) {
1064
        gen_op_addl_ESI_T0();
1065
        gen_op_addl_EDI_T0();
1066
    } else {
1067
        gen_op_addw_ESI_T0();
1068
        gen_op_addw_EDI_T0();
1069
    }
1070
}
1071

    
1072
static inline void gen_ins(DisasContext *s, int ot)
1073
{
1074
    gen_string_movl_A0_EDI(s);
1075
    gen_op_movl_T0_0();
1076
    gen_op_st_T0_A0[ot + s->mem_index]();
1077
    gen_op_in_DX_T0[ot]();
1078
    gen_op_st_T0_A0[ot + s->mem_index]();
1079
    gen_op_movl_T0_Dshift[ot]();
1080
#ifdef TARGET_X86_64
1081
    if (s->aflag == 2) {
1082
        gen_op_addq_EDI_T0();
1083
    } else 
1084
#endif
1085
    if (s->aflag) {
1086
        gen_op_addl_EDI_T0();
1087
    } else {
1088
        gen_op_addw_EDI_T0();
1089
    }
1090
}
1091

    
1092
static inline void gen_outs(DisasContext *s, int ot)
1093
{
1094
    gen_string_movl_A0_ESI(s);
1095
    gen_op_ld_T0_A0[ot + s->mem_index]();
1096
    gen_op_out_DX_T0[ot]();
1097
    gen_op_movl_T0_Dshift[ot]();
1098
#ifdef TARGET_X86_64
1099
    if (s->aflag == 2) {
1100
        gen_op_addq_ESI_T0();
1101
    } else 
1102
#endif
1103
    if (s->aflag) {
1104
        gen_op_addl_ESI_T0();
1105
    } else {
1106
        gen_op_addw_ESI_T0();
1107
    }
1108
}
1109

    
1110
/* same method as Valgrind : we generate jumps to current or next
1111
   instruction */
1112
#define GEN_REPZ(op)                                                          \
1113
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
1114
                                 target_ulong cur_eip, target_ulong next_eip) \
1115
{                                                                             \
1116
    int l2;\
1117
    gen_update_cc_op(s);                                                      \
1118
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
1119
    gen_ ## op(s, ot);                                                        \
1120
    gen_op_dec_ECX[s->aflag]();                                               \
1121
    /* a loop would cause two single step exceptions if ECX = 1               \
1122
       before rep string_insn */                                              \
1123
    if (!s->jmp_opt)                                                          \
1124
        gen_op_jz_ecx[s->aflag](l2);                                          \
1125
    gen_jmp(s, cur_eip);                                                      \
1126
}
1127

    
1128
#define GEN_REPZ2(op)                                                         \
1129
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
1130
                                   target_ulong cur_eip,                      \
1131
                                   target_ulong next_eip,                     \
1132
                                   int nz)                                    \
1133
{                                                                             \
1134
    int l2;\
1135
    gen_update_cc_op(s);                                                      \
1136
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
1137
    gen_ ## op(s, ot);                                                        \
1138
    gen_op_dec_ECX[s->aflag]();                                               \
1139
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
1140
    gen_op_string_jnz_sub[nz][ot](l2);\
1141
    if (!s->jmp_opt)                                                          \
1142
        gen_op_jz_ecx[s->aflag](l2);                                          \
1143
    gen_jmp(s, cur_eip);                                                      \
1144
}
1145

    
1146
GEN_REPZ(movs)
1147
GEN_REPZ(stos)
1148
GEN_REPZ(lods)
1149
GEN_REPZ(ins)
1150
GEN_REPZ(outs)
1151
GEN_REPZ2(scas)
1152
GEN_REPZ2(cmps)
1153

    
1154
enum {
1155
    JCC_O,
1156
    JCC_B,
1157
    JCC_Z,
1158
    JCC_BE,
1159
    JCC_S,
1160
    JCC_P,
1161
    JCC_L,
1162
    JCC_LE,
1163
};
1164

    
1165
static GenOpFunc1 *gen_jcc_sub[4][8] = {
1166
    [OT_BYTE] = {
1167
        NULL,
1168
        gen_op_jb_subb,
1169
        gen_op_jz_subb,
1170
        gen_op_jbe_subb,
1171
        gen_op_js_subb,
1172
        NULL,
1173
        gen_op_jl_subb,
1174
        gen_op_jle_subb,
1175
    },
1176
    [OT_WORD] = {
1177
        NULL,
1178
        gen_op_jb_subw,
1179
        gen_op_jz_subw,
1180
        gen_op_jbe_subw,
1181
        gen_op_js_subw,
1182
        NULL,
1183
        gen_op_jl_subw,
1184
        gen_op_jle_subw,
1185
    },
1186
    [OT_LONG] = {
1187
        NULL,
1188
        gen_op_jb_subl,
1189
        gen_op_jz_subl,
1190
        gen_op_jbe_subl,
1191
        gen_op_js_subl,
1192
        NULL,
1193
        gen_op_jl_subl,
1194
        gen_op_jle_subl,
1195
    },
1196
#ifdef TARGET_X86_64
1197
    [OT_QUAD] = {
1198
        NULL,
1199
        BUGGY_64(gen_op_jb_subq),
1200
        gen_op_jz_subq,
1201
        BUGGY_64(gen_op_jbe_subq),
1202
        gen_op_js_subq,
1203
        NULL,
1204
        BUGGY_64(gen_op_jl_subq),
1205
        BUGGY_64(gen_op_jle_subq),
1206
    },
1207
#endif
1208
};
1209
static GenOpFunc1 *gen_op_loop[3][4] = {
1210
    [0] = {
1211
        gen_op_loopnzw,
1212
        gen_op_loopzw,
1213
        gen_op_jnz_ecxw,
1214
    },
1215
    [1] = {
1216
        gen_op_loopnzl,
1217
        gen_op_loopzl,
1218
        gen_op_jnz_ecxl,
1219
    },
1220
#ifdef TARGET_X86_64
1221
    [2] = {
1222
        gen_op_loopnzq,
1223
        gen_op_loopzq,
1224
        gen_op_jnz_ecxq,
1225
    },
1226
#endif
1227
};
1228

    
1229
static GenOpFunc *gen_setcc_slow[8] = {
1230
    gen_op_seto_T0_cc,
1231
    gen_op_setb_T0_cc,
1232
    gen_op_setz_T0_cc,
1233
    gen_op_setbe_T0_cc,
1234
    gen_op_sets_T0_cc,
1235
    gen_op_setp_T0_cc,
1236
    gen_op_setl_T0_cc,
1237
    gen_op_setle_T0_cc,
1238
};
1239

    
1240
static GenOpFunc *gen_setcc_sub[4][8] = {
1241
    [OT_BYTE] = {
1242
        NULL,
1243
        gen_op_setb_T0_subb,
1244
        gen_op_setz_T0_subb,
1245
        gen_op_setbe_T0_subb,
1246
        gen_op_sets_T0_subb,
1247
        NULL,
1248
        gen_op_setl_T0_subb,
1249
        gen_op_setle_T0_subb,
1250
    },
1251
    [OT_WORD] = {
1252
        NULL,
1253
        gen_op_setb_T0_subw,
1254
        gen_op_setz_T0_subw,
1255
        gen_op_setbe_T0_subw,
1256
        gen_op_sets_T0_subw,
1257
        NULL,
1258
        gen_op_setl_T0_subw,
1259
        gen_op_setle_T0_subw,
1260
    },
1261
    [OT_LONG] = {
1262
        NULL,
1263
        gen_op_setb_T0_subl,
1264
        gen_op_setz_T0_subl,
1265
        gen_op_setbe_T0_subl,
1266
        gen_op_sets_T0_subl,
1267
        NULL,
1268
        gen_op_setl_T0_subl,
1269
        gen_op_setle_T0_subl,
1270
    },
1271
#ifdef TARGET_X86_64
1272
    [OT_QUAD] = {
1273
        NULL,
1274
        gen_op_setb_T0_subq,
1275
        gen_op_setz_T0_subq,
1276
        gen_op_setbe_T0_subq,
1277
        gen_op_sets_T0_subq,
1278
        NULL,
1279
        gen_op_setl_T0_subq,
1280
        gen_op_setle_T0_subq,
1281
    },
1282
#endif
1283
};
1284

    
1285
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
1286
    gen_op_fadd_ST0_FT0,
1287
    gen_op_fmul_ST0_FT0,
1288
    gen_op_fcom_ST0_FT0,
1289
    gen_op_fcom_ST0_FT0,
1290
    gen_op_fsub_ST0_FT0,
1291
    gen_op_fsubr_ST0_FT0,
1292
    gen_op_fdiv_ST0_FT0,
1293
    gen_op_fdivr_ST0_FT0,
1294
};
1295

    
1296
/* NOTE the exception in "r" op ordering */
1297
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
1298
    gen_op_fadd_STN_ST0,
1299
    gen_op_fmul_STN_ST0,
1300
    NULL,
1301
    NULL,
1302
    gen_op_fsubr_STN_ST0,
1303
    gen_op_fsub_STN_ST0,
1304
    gen_op_fdivr_STN_ST0,
1305
    gen_op_fdiv_STN_ST0,
1306
};
1307

    
1308
/* if d == OR_TMP0, it means memory operand (address in A0) */
1309
static void gen_op(DisasContext *s1, int op, int ot, int d)
1310
{
1311
    GenOpFunc *gen_update_cc;
1312
    
1313
    if (d != OR_TMP0) {
1314
        gen_op_mov_TN_reg[ot][0][d]();
1315
    } else {
1316
        gen_op_ld_T0_A0[ot + s1->mem_index]();
1317
    }
1318
    switch(op) {
1319
    case OP_ADCL:
1320
    case OP_SBBL:
1321
        if (s1->cc_op != CC_OP_DYNAMIC)
1322
            gen_op_set_cc_op(s1->cc_op);
1323
        if (d != OR_TMP0) {
1324
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
1325
            gen_op_mov_reg_T0[ot][d]();
1326
        } else {
1327
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
1328
        }
1329
        s1->cc_op = CC_OP_DYNAMIC;
1330
        goto the_end;
1331
    case OP_ADDL:
1332
        gen_op_addl_T0_T1();
1333
        s1->cc_op = CC_OP_ADDB + ot;
1334
        gen_update_cc = gen_op_update2_cc;
1335
        break;
1336
    case OP_SUBL:
1337
        gen_op_subl_T0_T1();
1338
        s1->cc_op = CC_OP_SUBB + ot;
1339
        gen_update_cc = gen_op_update2_cc;
1340
        break;
1341
    default:
1342
    case OP_ANDL:
1343
    case OP_ORL:
1344
    case OP_XORL:
1345
        gen_op_arith_T0_T1_cc[op]();
1346
        s1->cc_op = CC_OP_LOGICB + ot;
1347
        gen_update_cc = gen_op_update1_cc;
1348
        break;
1349
    case OP_CMPL:
1350
        gen_op_cmpl_T0_T1_cc();
1351
        s1->cc_op = CC_OP_SUBB + ot;
1352
        gen_update_cc = NULL;
1353
        break;
1354
    }
1355
    if (op != OP_CMPL) {
1356
        if (d != OR_TMP0)
1357
            gen_op_mov_reg_T0[ot][d]();
1358
        else
1359
            gen_op_st_T0_A0[ot + s1->mem_index]();
1360
    }
1361
    /* the flags update must happen after the memory write (precise
1362
       exception support) */
1363
    if (gen_update_cc)
1364
        gen_update_cc();
1365
 the_end: ;
1366
}
1367

    
1368
/* if d == OR_TMP0, it means memory operand (address in A0) */
1369
static void gen_inc(DisasContext *s1, int ot, int d, int c)
1370
{
1371
    if (d != OR_TMP0)
1372
        gen_op_mov_TN_reg[ot][0][d]();
1373
    else
1374
        gen_op_ld_T0_A0[ot + s1->mem_index]();
1375
    if (s1->cc_op != CC_OP_DYNAMIC)
1376
        gen_op_set_cc_op(s1->cc_op);
1377
    if (c > 0) {
1378
        gen_op_incl_T0();
1379
        s1->cc_op = CC_OP_INCB + ot;
1380
    } else {
1381
        gen_op_decl_T0();
1382
        s1->cc_op = CC_OP_DECB + ot;
1383
    }
1384
    if (d != OR_TMP0)
1385
        gen_op_mov_reg_T0[ot][d]();
1386
    else
1387
        gen_op_st_T0_A0[ot + s1->mem_index]();
1388
    gen_op_update_inc_cc();
1389
}
1390

    
1391
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1392
{
1393
    if (d != OR_TMP0)
1394
        gen_op_mov_TN_reg[ot][0][d]();
1395
    else
1396
        gen_op_ld_T0_A0[ot + s1->mem_index]();
1397
    if (s != OR_TMP1)
1398
        gen_op_mov_TN_reg[ot][1][s]();
1399
    /* for zero counts, flags are not updated, so must do it dynamically */
1400
    if (s1->cc_op != CC_OP_DYNAMIC)
1401
        gen_op_set_cc_op(s1->cc_op);
1402
    
1403
    if (d != OR_TMP0)
1404
        gen_op_shift_T0_T1_cc[ot][op]();
1405
    else
1406
        gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
1407
    if (d != OR_TMP0)
1408
        gen_op_mov_reg_T0[ot][d]();
1409
    s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1410
}
1411

    
1412
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
1413
{
1414
    /* currently not optimized */
1415
    gen_op_movl_T1_im(c);
1416
    gen_shift(s1, op, ot, d, OR_TMP1);
1417
}
1418

    
1419
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1420
{
1421
    target_long disp;
1422
    int havesib;
1423
    int base;
1424
    int index;
1425
    int scale;
1426
    int opreg;
1427
    int mod, rm, code, override, must_add_seg;
1428

    
1429
    override = s->override;
1430
    must_add_seg = s->addseg;
1431
    if (override >= 0)
1432
        must_add_seg = 1;
1433
    mod = (modrm >> 6) & 3;
1434
    rm = modrm & 7;
1435

    
1436
    if (s->aflag) {
1437

    
1438
        havesib = 0;
1439
        base = rm;
1440
        index = 0;
1441
        scale = 0;
1442
        
1443
        if (base == 4) {
1444
            havesib = 1;
1445
            code = ldub_code(s->pc++);
1446
            scale = (code >> 6) & 3;
1447
            index = ((code >> 3) & 7) | REX_X(s);
1448
            base = (code & 7);
1449
        }
1450
        base |= REX_B(s);
1451

    
1452
        switch (mod) {
1453
        case 0:
1454
            if ((base & 7) == 5) {
1455
                base = -1;
1456
                disp = (int32_t)ldl_code(s->pc);
1457
                s->pc += 4;
1458
                if (CODE64(s) && !havesib) {
1459
                    disp += s->pc + s->rip_offset;
1460
                }
1461
            } else {
1462
                disp = 0;
1463
            }
1464
            break;
1465
        case 1:
1466
            disp = (int8_t)ldub_code(s->pc++);
1467
            break;
1468
        default:
1469
        case 2:
1470
            disp = ldl_code(s->pc);
1471
            s->pc += 4;
1472
            break;
1473
        }
1474
        
1475
        if (base >= 0) {
1476
            /* for correct popl handling with esp */
1477
            if (base == 4 && s->popl_esp_hack)
1478
                disp += s->popl_esp_hack;
1479
#ifdef TARGET_X86_64
1480
            if (s->aflag == 2) {
1481
                gen_op_movq_A0_reg[base]();
1482
                if (disp != 0) {
1483
                    if ((int32_t)disp == disp)
1484
                        gen_op_addq_A0_im(disp);
1485
                    else
1486
                        gen_op_addq_A0_im64(disp >> 32, disp);
1487
                }
1488
            } else 
1489
#endif
1490
            {
1491
                gen_op_movl_A0_reg[base]();
1492
                if (disp != 0)
1493
                    gen_op_addl_A0_im(disp);
1494
            }
1495
        } else {
1496
#ifdef TARGET_X86_64
1497
            if (s->aflag == 2) {
1498
                if ((int32_t)disp == disp)
1499
                    gen_op_movq_A0_im(disp);
1500
                else
1501
                    gen_op_movq_A0_im64(disp >> 32, disp);
1502
            } else 
1503
#endif
1504
            {
1505
                gen_op_movl_A0_im(disp);
1506
            }
1507
        }
1508
        /* XXX: index == 4 is always invalid */
1509
        if (havesib && (index != 4 || scale != 0)) {
1510
#ifdef TARGET_X86_64
1511
            if (s->aflag == 2) {
1512
                gen_op_addq_A0_reg_sN[scale][index]();
1513
            } else 
1514
#endif
1515
            {
1516
                gen_op_addl_A0_reg_sN[scale][index]();
1517
            }
1518
        }
1519
        if (must_add_seg) {
1520
            if (override < 0) {
1521
                if (base == R_EBP || base == R_ESP)
1522
                    override = R_SS;
1523
                else
1524
                    override = R_DS;
1525
            }
1526
#ifdef TARGET_X86_64
1527
            if (s->aflag == 2) {
1528
                gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
1529
            } else 
1530
#endif
1531
            {
1532
                gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1533
            }
1534
        }
1535
    } else {
1536
        switch (mod) {
1537
        case 0:
1538
            if (rm == 6) {
1539
                disp = lduw_code(s->pc);
1540
                s->pc += 2;
1541
                gen_op_movl_A0_im(disp);
1542
                rm = 0; /* avoid SS override */
1543
                goto no_rm;
1544
            } else {
1545
                disp = 0;
1546
            }
1547
            break;
1548
        case 1:
1549
            disp = (int8_t)ldub_code(s->pc++);
1550
            break;
1551
        default:
1552
        case 2:
1553
            disp = lduw_code(s->pc);
1554
            s->pc += 2;
1555
            break;
1556
        }
1557
        switch(rm) {
1558
        case 0:
1559
            gen_op_movl_A0_reg[R_EBX]();
1560
            gen_op_addl_A0_reg_sN[0][R_ESI]();
1561
            break;
1562
        case 1:
1563
            gen_op_movl_A0_reg[R_EBX]();
1564
            gen_op_addl_A0_reg_sN[0][R_EDI]();
1565
            break;
1566
        case 2:
1567
            gen_op_movl_A0_reg[R_EBP]();
1568
            gen_op_addl_A0_reg_sN[0][R_ESI]();
1569
            break;
1570
        case 3:
1571
            gen_op_movl_A0_reg[R_EBP]();
1572
            gen_op_addl_A0_reg_sN[0][R_EDI]();
1573
            break;
1574
        case 4:
1575
            gen_op_movl_A0_reg[R_ESI]();
1576
            break;
1577
        case 5:
1578
            gen_op_movl_A0_reg[R_EDI]();
1579
            break;
1580
        case 6:
1581
            gen_op_movl_A0_reg[R_EBP]();
1582
            break;
1583
        default:
1584
        case 7:
1585
            gen_op_movl_A0_reg[R_EBX]();
1586
            break;
1587
        }
1588
        if (disp != 0)
1589
            gen_op_addl_A0_im(disp);
1590
        gen_op_andl_A0_ffff();
1591
    no_rm:
1592
        if (must_add_seg) {
1593
            if (override < 0) {
1594
                if (rm == 2 || rm == 3 || rm == 6)
1595
                    override = R_SS;
1596
                else
1597
                    override = R_DS;
1598
            }
1599
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1600
        }
1601
    }
1602

    
1603
    opreg = OR_A0;
1604
    disp = 0;
1605
    *reg_ptr = opreg;
1606
    *offset_ptr = disp;
1607
}
1608

    
1609
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1610
   OR_TMP0 */
1611
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1612
{
1613
    int mod, rm, opreg, disp;
1614

    
1615
    mod = (modrm >> 6) & 3;
1616
    rm = (modrm & 7) | REX_B(s);
1617
    if (mod == 3) {
1618
        if (is_store) {
1619
            if (reg != OR_TMP0)
1620
                gen_op_mov_TN_reg[ot][0][reg]();
1621
            gen_op_mov_reg_T0[ot][rm]();
1622
        } else {
1623
            gen_op_mov_TN_reg[ot][0][rm]();
1624
            if (reg != OR_TMP0)
1625
                gen_op_mov_reg_T0[ot][reg]();
1626
        }
1627
    } else {
1628
        gen_lea_modrm(s, modrm, &opreg, &disp);
1629
        if (is_store) {
1630
            if (reg != OR_TMP0)
1631
                gen_op_mov_TN_reg[ot][0][reg]();
1632
            gen_op_st_T0_A0[ot + s->mem_index]();
1633
        } else {
1634
            gen_op_ld_T0_A0[ot + s->mem_index]();
1635
            if (reg != OR_TMP0)
1636
                gen_op_mov_reg_T0[ot][reg]();
1637
        }
1638
    }
1639
}
1640

    
1641
static inline uint32_t insn_get(DisasContext *s, int ot)
1642
{
1643
    uint32_t ret;
1644

    
1645
    switch(ot) {
1646
    case OT_BYTE:
1647
        ret = ldub_code(s->pc);
1648
        s->pc++;
1649
        break;
1650
    case OT_WORD:
1651
        ret = lduw_code(s->pc);
1652
        s->pc += 2;
1653
        break;
1654
    default:
1655
    case OT_LONG:
1656
        ret = ldl_code(s->pc);
1657
        s->pc += 4;
1658
        break;
1659
    }
1660
    return ret;
1661
}
1662

    
1663
static inline int insn_const_size(unsigned int ot)
1664
{
1665
    if (ot <= OT_LONG)
1666
        return 1 << ot;
1667
    else
1668
        return 4;
1669
}
1670

    
1671
static inline void gen_jcc(DisasContext *s, int b, 
1672
                           target_ulong val, target_ulong next_eip)
1673
{
1674
    TranslationBlock *tb;
1675
    int inv, jcc_op;
1676
    GenOpFunc1 *func;
1677
    target_ulong tmp;
1678
    int l1, l2;
1679

    
1680
    inv = b & 1;
1681
    jcc_op = (b >> 1) & 7;
1682
    
1683
    if (s->jmp_opt) {
1684
        switch(s->cc_op) {
1685
            /* we optimize the cmp/jcc case */
1686
        case CC_OP_SUBB:
1687
        case CC_OP_SUBW:
1688
        case CC_OP_SUBL:
1689
        case CC_OP_SUBQ:
1690
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1691
            break;
1692
            
1693
            /* some jumps are easy to compute */
1694
        case CC_OP_ADDB:
1695
        case CC_OP_ADDW:
1696
        case CC_OP_ADDL:
1697
        case CC_OP_ADDQ:
1698

    
1699
        case CC_OP_ADCB:
1700
        case CC_OP_ADCW:
1701
        case CC_OP_ADCL:
1702
        case CC_OP_ADCQ:
1703

    
1704
        case CC_OP_SBBB:
1705
        case CC_OP_SBBW:
1706
        case CC_OP_SBBL:
1707
        case CC_OP_SBBQ:
1708

    
1709
        case CC_OP_LOGICB:
1710
        case CC_OP_LOGICW:
1711
        case CC_OP_LOGICL:
1712
        case CC_OP_LOGICQ:
1713

    
1714
        case CC_OP_INCB:
1715
        case CC_OP_INCW:
1716
        case CC_OP_INCL:
1717
        case CC_OP_INCQ:
1718

    
1719
        case CC_OP_DECB:
1720
        case CC_OP_DECW:
1721
        case CC_OP_DECL:
1722
        case CC_OP_DECQ:
1723

    
1724
        case CC_OP_SHLB:
1725
        case CC_OP_SHLW:
1726
        case CC_OP_SHLL:
1727
        case CC_OP_SHLQ:
1728

    
1729
        case CC_OP_SARB:
1730
        case CC_OP_SARW:
1731
        case CC_OP_SARL:
1732
        case CC_OP_SARQ:
1733
            switch(jcc_op) {
1734
            case JCC_Z:
1735
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1736
                break;
1737
            case JCC_S:
1738
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1739
                break;
1740
            default:
1741
                func = NULL;
1742
                break;
1743
            }
1744
            break;
1745
        default:
1746
            func = NULL;
1747
            break;
1748
        }
1749

    
1750
        if (s->cc_op != CC_OP_DYNAMIC)
1751
            gen_op_set_cc_op(s->cc_op);
1752

    
1753
        if (!func) {
1754
            gen_setcc_slow[jcc_op]();
1755
            func = gen_op_jnz_T0_label;
1756
        }
1757
    
1758
        if (inv) {
1759
            tmp = val;
1760
            val = next_eip;
1761
            next_eip = tmp;
1762
        }
1763
        tb = s->tb;
1764

    
1765
        l1 = gen_new_label();
1766
        func(l1);
1767

    
1768
        gen_op_goto_tb0();
1769
        gen_jmp_im(next_eip);
1770
        gen_op_movl_T0_im((long)tb + 0);
1771
        gen_op_exit_tb();
1772

    
1773
        gen_set_label(l1);
1774
        gen_op_goto_tb1();
1775
        gen_jmp_im(val);
1776
        gen_op_movl_T0_im((long)tb + 1);
1777
        gen_op_exit_tb();
1778

    
1779
        s->is_jmp = 3;
1780
    } else {
1781

    
1782
        if (s->cc_op != CC_OP_DYNAMIC) {
1783
            gen_op_set_cc_op(s->cc_op);
1784
            s->cc_op = CC_OP_DYNAMIC;
1785
        }
1786
        gen_setcc_slow[jcc_op]();
1787
        if (inv) {
1788
            tmp = val;
1789
            val = next_eip;
1790
            next_eip = tmp;
1791
        }
1792
        l1 = gen_new_label();
1793
        l2 = gen_new_label();
1794
        gen_op_jnz_T0_label(l1);
1795
        gen_jmp_im(next_eip);
1796
        gen_op_jmp_label(l2);
1797
        gen_set_label(l1);
1798
        gen_jmp_im(val);
1799
        gen_set_label(l2);
1800
        gen_eob(s);
1801
    }
1802
}
1803

    
1804
static void gen_setcc(DisasContext *s, int b)
1805
{
1806
    int inv, jcc_op;
1807
    GenOpFunc *func;
1808

    
1809
    inv = b & 1;
1810
    jcc_op = (b >> 1) & 7;
1811
    switch(s->cc_op) {
1812
        /* we optimize the cmp/jcc case */
1813
    case CC_OP_SUBB:
1814
    case CC_OP_SUBW:
1815
    case CC_OP_SUBL:
1816
    case CC_OP_SUBQ:
1817
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1818
        if (!func)
1819
            goto slow_jcc;
1820
        break;
1821
        
1822
        /* some jumps are easy to compute */
1823
    case CC_OP_ADDB:
1824
    case CC_OP_ADDW:
1825
    case CC_OP_ADDL:
1826
    case CC_OP_ADDQ:
1827

    
1828
    case CC_OP_LOGICB:
1829
    case CC_OP_LOGICW:
1830
    case CC_OP_LOGICL:
1831
    case CC_OP_LOGICQ:
1832

    
1833
    case CC_OP_INCB:
1834
    case CC_OP_INCW:
1835
    case CC_OP_INCL:
1836
    case CC_OP_INCQ:
1837

    
1838
    case CC_OP_DECB:
1839
    case CC_OP_DECW:
1840
    case CC_OP_DECL:
1841
    case CC_OP_DECQ:
1842

    
1843
    case CC_OP_SHLB:
1844
    case CC_OP_SHLW:
1845
    case CC_OP_SHLL:
1846
    case CC_OP_SHLQ:
1847
        switch(jcc_op) {
1848
        case JCC_Z:
1849
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1850
            break;
1851
        case JCC_S:
1852
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1853
            break;
1854
        default:
1855
            goto slow_jcc;
1856
        }
1857
        break;
1858
    default:
1859
    slow_jcc:
1860
        if (s->cc_op != CC_OP_DYNAMIC)
1861
            gen_op_set_cc_op(s->cc_op);
1862
        func = gen_setcc_slow[jcc_op];
1863
        break;
1864
    }
1865
    func();
1866
    if (inv) {
1867
        gen_op_xor_T0_1();
1868
    }
1869
}

/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop, as special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}

static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        if (addend == 8)
            gen_op_addq_ESP_8();
        else
            gen_op_addq_ESP_im(addend);
    } else
#endif
    if (s->ss32) {
        if (addend == 2)
            gen_op_addl_ESP_2();
        else if (addend == 4)
            gen_op_addl_ESP_4();
        else
            gen_op_addl_ESP_im(addend);
    } else {
        if (addend == 2)
            gen_op_addw_ESP_2();
        else if (addend == 4)
            gen_op_addw_ESP_4();
        else
            gen_op_addw_ESP_im(addend);
    }
}

/* generate a push. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        /* XXX: check 16 bit behaviour */
        gen_op_movq_A0_reg[R_ESP]();
        gen_op_subq_A0_8();
        gen_op_st_T0_A0[OT_QUAD + s->mem_index]();
        gen_op_movq_ESP_A0();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (!s->dflag)
            gen_op_subl_A0_2();
        else
            gen_op_subl_A0_4();
        if (s->ss32) {
            if (s->addseg) {
                gen_op_movl_T1_A0();
                gen_op_addl_A0_SS();
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_SS();
        }
        gen_op_st_T0_A0[s->dflag + 1 + s->mem_index]();
        if (s->ss32 && !s->addseg)
            gen_op_movl_ESP_A0();
        else
            gen_op_mov_reg_T1[s->ss32 + 1][R_ESP]();
    }
}
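
/* Note: the push helpers compute the new stack pointer in A0 (adding the
   SS base when addseg is set), generate the store, and only then commit
   the new ESP/RSP value.  A faulting store therefore leaves ESP
   unchanged, as required for precise exceptions.  For instance, a 32-bit
   "push %eax" with a flat stack segment (ss32 set, addseg clear,
   dflag == 1) expands to:
     gen_op_movl_A0_reg[R_ESP]();               // A0 = ESP
     gen_op_subl_A0_4();                        // A0 -= 4
     gen_op_st_T0_A0[OT_LONG + s->mem_index](); // store T0 at SS:A0
     gen_op_movl_ESP_A0();                      // commit the new ESP
*/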

/* generate a push. It depends on ss32, addseg and dflag */
/* slower version for T1, only used for call Ev */
static void gen_push_T1(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        /* XXX: check 16 bit behaviour */
        gen_op_movq_A0_reg[R_ESP]();
        gen_op_subq_A0_8();
        gen_op_st_T1_A0[OT_QUAD + s->mem_index]();
        gen_op_movq_ESP_A0();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (!s->dflag)
            gen_op_subl_A0_2();
        else
            gen_op_subl_A0_4();
        if (s->ss32) {
            if (s->addseg) {
                gen_op_addl_A0_SS();
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_SS();
        }
        gen_op_st_T1_A0[s->dflag + 1 + s->mem_index]();

        if (s->ss32 && !s->addseg)
            gen_op_movl_ESP_A0();
        else
            gen_stack_update(s, (-2) << s->dflag);
    }
}

/* two step pop is necessary for precise exceptions */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        /* XXX: check 16 bit behaviour */
        gen_op_movq_A0_reg[R_ESP]();
        gen_op_ld_T0_A0[OT_QUAD + s->mem_index]();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_SS();
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_SS();
        }
        gen_op_ld_T0_A0[s->dflag + 1 + s->mem_index]();
    }
}

static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}

static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
}

/* NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    gen_op_addl_A0_im(-16 <<  s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
        gen_op_st_T0_A0[OT_WORD + s->dflag + s->mem_index]();
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->dflag][R_ESP]();
}
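
/* Note: the loop stores the registers starting with EDI (index 7 - i) at
   the lowest address, so EAX ends up at the highest address, matching the
   order in which PUSHA pushes them; the final stack pointer is kept in T1
   and only written back to ESP after all eight stores have been
   generated. */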

/* NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    gen_op_addl_T1_im(16 <<  s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0[OT_WORD + s->dflag + s->mem_index]();
            gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
        }
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->dflag][R_ESP]();
}

static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    ot = s->dflag + OT_WORD;
    level &= 0x1f;
    opsize = 2 << s->dflag;

    gen_op_movl_A0_ESP();
    gen_op_addl_A0_im(-opsize);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    /* push bp */
    gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
    gen_op_st_T0_A0[ot + s->mem_index]();
    if (level) {
        gen_op_enter_level(level, s->dflag);
    }
    gen_op_mov_reg_T1[ot][R_EBP]();
    gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
    gen_op_mov_reg_T1[ot][R_ESP]();
}
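
/* Note: gen_enter models ENTER as: push the caller's EBP, let
   gen_op_enter_level() handle the frame-pointer copies when a non-zero
   nesting level (at most 31) is given, load EBP with the new frame base
   saved in T1, and finally lower ESP by the requested local-storage size
   plus the space consumed by the copied levels. */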

static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;
}

/* an interrupt is different from an exception because of the
   privilege checks */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3;
}

static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;
}

/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        gen_op_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        gen_op_debug();
    } else if (s->tf) {
        gen_op_raise_exception(EXCP01_SSTP);
    } else {
        gen_op_movl_T0_0();
        gen_op_exit_tb();
    }
    s->is_jmp = 3;
}

/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    TranslationBlock *tb = s->tb;

    if (s->jmp_opt) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        if (tb_num)
            gen_op_goto_tb1();
        else
            gen_op_goto_tb0();
        gen_jmp_im(eip);
        gen_op_movl_T0_im((long)tb + tb_num);
        gen_op_exit_tb();
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
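
/* Note: when direct chaining is allowed (jmp_opt), the block ends with
   gen_op_goto_tb0/1, whose jump can later be patched to branch straight
   into the next TranslationBlock.  T0 is loaded with the TB pointer plus
   the chaining slot index (0 or 1) before gen_op_exit_tb(), so the
   execution loop knows which slot of this TB to link once the target
   block has been translated. */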

static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}

static void gen_movtl_T0_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val == val) {
        gen_op_movl_T0_im(val);
    } else {
        gen_op_movq_T0_im64(val >> 32, val);
    }
#else
    gen_op_movl_T0_im(val);
#endif
}
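
/* Note: the tables below are indexed by s->mem_index: entry 0 is the raw
   access path used when there is no soft MMU (user-mode emulation), and
   entries 1 and 2 are the kernel and user soft-MMU variants used by the
   system emulator. */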

static GenOpFunc1 *gen_ldo_env_A0[3] = {
    gen_op_ldo_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldo_kernel_env_A0,
    gen_op_ldo_user_env_A0,
#endif
};

static GenOpFunc1 *gen_sto_env_A0[3] = {
    gen_op_sto_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_sto_kernel_env_A0,
    gen_op_sto_user_env_A0,
#endif
};
2212
/* convert one instruction. s->is_jmp is set if the translation must
2213
   be stopped. Return the next pc value */
2214
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
2215
{
2216
    int b, prefixes, aflag, dflag;
2217
    int shift, ot;
2218
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
2219
    target_ulong next_eip, tval;
2220
    int rex_w, rex_r;
2221

    
2222
    s->pc = pc_start;
2223
    prefixes = 0;
2224
    aflag = s->code32;
2225
    dflag = s->code32;
2226
    s->override = -1;
2227
    rex_w = -1;
2228
    rex_r = 0;
2229
#ifdef TARGET_X86_64
2230
    s->rex_x = 0;
2231
    s->rex_b = 0;
2232
    x86_64_hregs = 0; 
2233
#endif
2234
    s->rip_offset = 0; /* for relative ip address */
2235
 next_byte:
2236
    b = ldub_code(s->pc);
2237
    s->pc++;
2238
    /* check prefixes */
2239
#ifdef TARGET_X86_64
2240
    if (CODE64(s)) {
2241
        switch (b) {
2242
        case 0xf3:
2243
            prefixes |= PREFIX_REPZ;
2244
            goto next_byte;
2245
        case 0xf2:
2246
            prefixes |= PREFIX_REPNZ;
2247
            goto next_byte;
2248
        case 0xf0:
2249
            prefixes |= PREFIX_LOCK;
2250
            goto next_byte;
2251
        case 0x2e:
2252
            s->override = R_CS;
2253
            goto next_byte;
2254
        case 0x36:
2255
            s->override = R_SS;
2256
            goto next_byte;
2257
        case 0x3e:
2258
            s->override = R_DS;
2259
            goto next_byte;
2260
        case 0x26:
2261
            s->override = R_ES;
2262
            goto next_byte;
2263
        case 0x64:
2264
            s->override = R_FS;
2265
            goto next_byte;
2266
        case 0x65:
2267
            s->override = R_GS;
2268
            goto next_byte;
2269
        case 0x66:
2270
            prefixes |= PREFIX_DATA;
2271
            goto next_byte;
2272
        case 0x67:
2273
            prefixes |= PREFIX_ADR;
2274
            goto next_byte;
2275
        case 0x40 ... 0x4f:
2276
            /* REX prefix */
2277
            rex_w = (b >> 3) & 1;
2278
            rex_r = (b & 0x4) << 1;
2279
            s->rex_x = (b & 0x2) << 2;
2280
            REX_B(s) = (b & 0x1) << 3;
2281
            x86_64_hregs = 1; /* select uniform byte register addressing */
2282
            goto next_byte;
2283
        }
2284
        if (rex_w == 1) {
2285
            /* 0x66 is ignored if rex.w is set */
2286
            dflag = 2;
2287
        } else {
2288
            if (prefixes & PREFIX_DATA)
2289
                dflag ^= 1;
2290
        }
2291
        if (!(prefixes & PREFIX_ADR))
2292
            aflag = 2;
2293
    } else 
2294
#endif
2295
    {
2296
        switch (b) {
2297
        case 0xf3:
2298
            prefixes |= PREFIX_REPZ;
2299
            goto next_byte;
2300
        case 0xf2:
2301
            prefixes |= PREFIX_REPNZ;
2302
            goto next_byte;
2303
        case 0xf0:
2304
            prefixes |= PREFIX_LOCK;
2305
            goto next_byte;
2306
        case 0x2e:
2307
            s->override = R_CS;
2308
            goto next_byte;
2309
        case 0x36:
2310
            s->override = R_SS;
2311
            goto next_byte;
2312
        case 0x3e:
2313
            s->override = R_DS;
2314
            goto next_byte;
2315
        case 0x26:
2316
            s->override = R_ES;
2317
            goto next_byte;
2318
        case 0x64:
2319
            s->override = R_FS;
2320
            goto next_byte;
2321
        case 0x65:
2322
            s->override = R_GS;
2323
            goto next_byte;
2324
        case 0x66:
2325
            prefixes |= PREFIX_DATA;
2326
            goto next_byte;
2327
        case 0x67:
2328
            prefixes |= PREFIX_ADR;
2329
            goto next_byte;
2330
        }
2331
        if (prefixes & PREFIX_DATA)
2332
            dflag ^= 1;
2333
        if (prefixes & PREFIX_ADR)
2334
            aflag ^= 1;
2335
    }
2336

    
2337
    s->prefix = prefixes;
2338
    s->aflag = aflag;
2339
    s->dflag = dflag;
2340

    
2341
    /* lock generation */
2342
    if (prefixes & PREFIX_LOCK)
2343
        gen_op_lock();
2344

    
2345
    /* now check op code */
2346
 reswitch:
2347
    switch(b) {
2348
    case 0x0f:
2349
        /**************************/
2350
        /* extended op code */
2351
        b = ldub_code(s->pc++) | 0x100;
2352
        goto reswitch;
2353
        
2354
        /**************************/
2355
        /* arith & logic */
2356
    case 0x00 ... 0x05:
2357
    case 0x08 ... 0x0d:
2358
    case 0x10 ... 0x15:
2359
    case 0x18 ... 0x1d:
2360
    case 0x20 ... 0x25:
2361
    case 0x28 ... 0x2d:
2362
    case 0x30 ... 0x35:
2363
    case 0x38 ... 0x3d:
2364
        {
2365
            int op, f, val;
2366
            op = (b >> 3) & 7;
2367
            f = (b >> 1) & 3;
2368

    
2369
            if ((b & 1) == 0)
2370
                ot = OT_BYTE;
2371
            else
2372
                ot = dflag + OT_WORD;
2373
            
2374
            switch(f) {
2375
            case 0: /* OP Ev, Gv */
2376
                modrm = ldub_code(s->pc++);
2377
                reg = ((modrm >> 3) & 7) | rex_r;
2378
                mod = (modrm >> 6) & 3;
2379
                rm = (modrm & 7) | REX_B(s);
2380
                if (mod != 3) {
2381
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2382
                    opreg = OR_TMP0;
2383
                } else if (op == OP_XORL && rm == reg) {
2384
                xor_zero:
2385
                    /* xor reg, reg optimisation */
2386
                    gen_op_movl_T0_0();
2387
                    s->cc_op = CC_OP_LOGICB + ot;
2388
                    gen_op_mov_reg_T0[ot][reg]();
2389
                    gen_op_update1_cc();
2390
                    break;
2391
                } else {
2392
                    opreg = rm;
2393
                }
2394
                gen_op_mov_TN_reg[ot][1][reg]();
2395
                gen_op(s, op, ot, opreg);
2396
                break;
2397
            case 1: /* OP Gv, Ev */
2398
                modrm = ldub_code(s->pc++);
2399
                mod = (modrm >> 6) & 3;
2400
                reg = ((modrm >> 3) & 7) | rex_r;
2401
                rm = (modrm & 7) | REX_B(s);
2402
                if (mod != 3) {
2403
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2404
                    gen_op_ld_T1_A0[ot + s->mem_index]();
2405
                } else if (op == OP_XORL && rm == reg) {
2406
                    goto xor_zero;
2407
                } else {
2408
                    gen_op_mov_TN_reg[ot][1][rm]();
2409
                }
2410
                gen_op(s, op, ot, reg);
2411
                break;
2412
            case 2: /* OP A, Iv */
2413
                val = insn_get(s, ot);
2414
                gen_op_movl_T1_im(val);
2415
                gen_op(s, op, ot, OR_EAX);
2416
                break;
2417
            }
2418
        }
2419
        break;
2420

    
2421
    case 0x80: /* GRP1 */
2422
    case 0x81:
2423
    case 0x82:
2424
    case 0x83:
2425
        {
2426
            int val;
2427

    
2428
            if ((b & 1) == 0)
2429
                ot = OT_BYTE;
2430
            else
2431
                ot = dflag + OT_WORD;
2432
            
2433
            modrm = ldub_code(s->pc++);
2434
            mod = (modrm >> 6) & 3;
2435
            rm = (modrm & 7) | REX_B(s);
2436
            op = (modrm >> 3) & 7;
2437
            
2438
            if (mod != 3) {
2439
                if (b == 0x83)
2440
                    s->rip_offset = 1;
2441
                else
2442
                    s->rip_offset = insn_const_size(ot);
2443
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2444
                opreg = OR_TMP0;
2445
            } else {
2446
                opreg = rm;
2447
            }
2448

    
2449
            switch(b) {
2450
            default:
2451
            case 0x80:
2452
            case 0x81:
2453
            case 0x82:
2454
                val = insn_get(s, ot);
2455
                break;
2456
            case 0x83:
2457
                val = (int8_t)insn_get(s, OT_BYTE);
2458
                break;
2459
            }
2460
            gen_op_movl_T1_im(val);
2461
            gen_op(s, op, ot, opreg);
2462
        }
2463
        break;
2464

    
2465
        /**************************/
2466
        /* inc, dec, and other misc arith */
2467
    case 0x40 ... 0x47: /* inc Gv */
2468
        ot = dflag ? OT_LONG : OT_WORD;
2469
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
2470
        break;
2471
    case 0x48 ... 0x4f: /* dec Gv */
2472
        ot = dflag ? OT_LONG : OT_WORD;
2473
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
2474
        break;
2475
    case 0xf6: /* GRP3 */
2476
    case 0xf7:
2477
        if ((b & 1) == 0)
2478
            ot = OT_BYTE;
2479
        else
2480
            ot = dflag + OT_WORD;
2481

    
2482
        modrm = ldub_code(s->pc++);
2483
        mod = (modrm >> 6) & 3;
2484
        rm = (modrm & 7) | REX_B(s);
2485
        op = (modrm >> 3) & 7;
2486
        if (mod != 3) {
2487
            if (op == 0)
2488
                s->rip_offset = insn_const_size(ot);
2489
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2490
            gen_op_ld_T0_A0[ot + s->mem_index]();
2491
        } else {
2492
            gen_op_mov_TN_reg[ot][0][rm]();
2493
        }
2494

    
2495
        switch(op) {
2496
        case 0: /* test */
2497
            val = insn_get(s, ot);
2498
            gen_op_movl_T1_im(val);
2499
            gen_op_testl_T0_T1_cc();
2500
            s->cc_op = CC_OP_LOGICB + ot;
2501
            break;
2502
        case 2: /* not */
2503
            gen_op_notl_T0();
2504
            if (mod != 3) {
2505
                gen_op_st_T0_A0[ot + s->mem_index]();
2506
            } else {
2507
                gen_op_mov_reg_T0[ot][rm]();
2508
            }
2509
            break;
2510
        case 3: /* neg */
2511
            gen_op_negl_T0();
2512
            if (mod != 3) {
2513
                gen_op_st_T0_A0[ot + s->mem_index]();
2514
            } else {
2515
                gen_op_mov_reg_T0[ot][rm]();
2516
            }
2517
            gen_op_update_neg_cc();
2518
            s->cc_op = CC_OP_SUBB + ot;
2519
            break;
2520
        case 4: /* mul */
2521
            switch(ot) {
2522
            case OT_BYTE:
2523
                gen_op_mulb_AL_T0();
2524
                s->cc_op = CC_OP_MULB;
2525
                break;
2526
            case OT_WORD:
2527
                gen_op_mulw_AX_T0();
2528
                s->cc_op = CC_OP_MULW;
2529
                break;
2530
            default:
2531
            case OT_LONG:
2532
                gen_op_mull_EAX_T0();
2533
                s->cc_op = CC_OP_MULL;
2534
                break;
2535
#ifdef TARGET_X86_64
2536
            case OT_QUAD:
2537
                gen_op_mulq_EAX_T0();
2538
                s->cc_op = CC_OP_MULQ;
2539
                break;
2540
#endif
2541
            }
2542
            break;
2543
        case 5: /* imul */
2544
            switch(ot) {
2545
            case OT_BYTE:
2546
                gen_op_imulb_AL_T0();
2547
                s->cc_op = CC_OP_MULB;
2548
                break;
2549
            case OT_WORD:
2550
                gen_op_imulw_AX_T0();
2551
                s->cc_op = CC_OP_MULW;
2552
                break;
2553
            default:
2554
            case OT_LONG:
2555
                gen_op_imull_EAX_T0();
2556
                s->cc_op = CC_OP_MULL;
2557
                break;
2558
#ifdef TARGET_X86_64
2559
            case OT_QUAD:
2560
                gen_op_imulq_EAX_T0();
2561
                s->cc_op = CC_OP_MULQ;
2562
                break;
2563
#endif
2564
            }
2565
            break;
2566
        case 6: /* div */
2567
            switch(ot) {
2568
            case OT_BYTE:
2569
                gen_jmp_im(pc_start - s->cs_base);
2570
                gen_op_divb_AL_T0();
2571
                break;
2572
            case OT_WORD:
2573
                gen_jmp_im(pc_start - s->cs_base);
2574
                gen_op_divw_AX_T0();
2575
                break;
2576
            default:
2577
            case OT_LONG:
2578
                gen_jmp_im(pc_start - s->cs_base);
2579
                gen_op_divl_EAX_T0();
2580
                break;
2581
#ifdef TARGET_X86_64
2582
            case OT_QUAD:
2583
                gen_jmp_im(pc_start - s->cs_base);
2584
                gen_op_divq_EAX_T0();
2585
                break;
2586
#endif
2587
            }
2588
            break;
2589
        case 7: /* idiv */
2590
            switch(ot) {
2591
            case OT_BYTE:
2592
                gen_jmp_im(pc_start - s->cs_base);
2593
                gen_op_idivb_AL_T0();
2594
                break;
2595
            case OT_WORD:
2596
                gen_jmp_im(pc_start - s->cs_base);
2597
                gen_op_idivw_AX_T0();
2598
                break;
2599
            default:
2600
            case OT_LONG:
2601
                gen_jmp_im(pc_start - s->cs_base);
2602
                gen_op_idivl_EAX_T0();
2603
                break;
2604
#ifdef TARGET_X86_64
2605
            case OT_QUAD:
2606
                gen_jmp_im(pc_start - s->cs_base);
2607
                gen_op_idivq_EAX_T0();
2608
                break;
2609
#endif
2610
            }
2611
            break;
2612
        default:
2613
            goto illegal_op;
2614
        }
2615
        break;
2616

    
2617
    case 0xfe: /* GRP4 */
2618
    case 0xff: /* GRP5 */
2619
        if ((b & 1) == 0)
2620
            ot = OT_BYTE;
2621
        else
2622
            ot = dflag + OT_WORD;
2623

    
2624
        modrm = ldub_code(s->pc++);
2625
        mod = (modrm >> 6) & 3;
2626
        rm = (modrm & 7) | REX_B(s);
2627
        op = (modrm >> 3) & 7;
2628
        if (op >= 2 && b == 0xfe) {
2629
            goto illegal_op;
2630
        }
2631
        if (CODE64(s)) {
2632
            if (op >= 2 && op <= 5) {
2633
                /* operand size for jumps is 64 bit */
2634
                ot = OT_QUAD;
2635
            } else if (op == 6) {
2636
                /* default push size is 64 bit */
2637
                ot = dflag ? OT_QUAD : OT_WORD;
2638
            }
2639
        }
2640
        if (mod != 3) {
2641
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2642
            if (op >= 2 && op != 3 && op != 5)
2643
                gen_op_ld_T0_A0[ot + s->mem_index]();
2644
        } else {
2645
            gen_op_mov_TN_reg[ot][0][rm]();
2646
        }
2647

    
2648
        switch(op) {
2649
        case 0: /* inc Ev */
2650
            if (mod != 3)
2651
                opreg = OR_TMP0;
2652
            else
2653
                opreg = rm;
2654
            gen_inc(s, ot, opreg, 1);
2655
            break;
2656
        case 1: /* dec Ev */
2657
            if (mod != 3)
2658
                opreg = OR_TMP0;
2659
            else
2660
                opreg = rm;
2661
            gen_inc(s, ot, opreg, -1);
2662
            break;
2663
        case 2: /* call Ev */
2664
            /* XXX: optimize if memory (no 'and' is necessary) */
2665
            if (s->dflag == 0)
2666
                gen_op_andl_T0_ffff();
2667
            next_eip = s->pc - s->cs_base;
2668
            gen_op_movl_T1_im(next_eip);
2669
            gen_push_T1(s);
2670
            gen_op_jmp_T0();
2671
            gen_eob(s);
2672
            break;
2673
        case 3: /* lcall Ev */
2674
            gen_op_ld_T1_A0[ot + s->mem_index]();
2675
            gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
2676
            gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
2677
        do_lcall:
2678
            if (s->pe && !s->vm86) {
2679
                if (s->cc_op != CC_OP_DYNAMIC)
2680
                    gen_op_set_cc_op(s->cc_op);
2681
                gen_jmp_im(pc_start - s->cs_base);
2682
                gen_op_lcall_protected_T0_T1(dflag, s->pc - s->cs_base);
2683
            } else {
2684
                gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
2685
            }
2686
            gen_eob(s);
2687
            break;
2688
        case 4: /* jmp Ev */
2689
            if (s->dflag == 0)
2690
                gen_op_andl_T0_ffff();
2691
            gen_op_jmp_T0();
2692
            gen_eob(s);
2693
            break;
2694
        case 5: /* ljmp Ev */
2695
            gen_op_ld_T1_A0[ot + s->mem_index]();
2696
            gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
2697
            gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
2698
        do_ljmp:
2699
            if (s->pe && !s->vm86) {
2700
                if (s->cc_op != CC_OP_DYNAMIC)
2701
                    gen_op_set_cc_op(s->cc_op);
2702
                gen_jmp_im(pc_start - s->cs_base);
2703
                gen_op_ljmp_protected_T0_T1(s->pc - s->cs_base);
2704
            } else {
2705
                gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
2706
                gen_op_movl_T0_T1();
2707
                gen_op_jmp_T0();
2708
            }
2709
            gen_eob(s);
2710
            break;
2711
        case 6: /* push Ev */
2712
            gen_push_T0(s);
2713
            break;
2714
        default:
2715
            goto illegal_op;
2716
        }
2717
        break;
2718

    
2719
    case 0x84: /* test Ev, Gv */
2720
    case 0x85: 
2721
        if ((b & 1) == 0)
2722
            ot = OT_BYTE;
2723
        else
2724
            ot = dflag + OT_WORD;
2725

    
2726
        modrm = ldub_code(s->pc++);
2727
        mod = (modrm >> 6) & 3;
2728
        rm = (modrm & 7) | REX_B(s);
2729
        reg = ((modrm >> 3) & 7) | rex_r;
2730
        
2731
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
2732
        gen_op_mov_TN_reg[ot][1][reg]();
2733
        gen_op_testl_T0_T1_cc();
2734
        s->cc_op = CC_OP_LOGICB + ot;
2735
        break;
2736
        
2737
    case 0xa8: /* test eAX, Iv */
2738
    case 0xa9:
2739
        if ((b & 1) == 0)
2740
            ot = OT_BYTE;
2741
        else
2742
            ot = dflag + OT_WORD;
2743
        val = insn_get(s, ot);
2744

    
2745
        gen_op_mov_TN_reg[ot][0][OR_EAX]();
2746
        gen_op_movl_T1_im(val);
2747
        gen_op_testl_T0_T1_cc();
2748
        s->cc_op = CC_OP_LOGICB + ot;
2749
        break;
2750
        
2751
    case 0x98: /* CWDE/CBW */
2752
#ifdef TARGET_X86_64
2753
        if (dflag == 2) {
2754
            gen_op_movslq_RAX_EAX();
2755
        } else
2756
#endif
2757
        if (dflag == 1)
2758
            gen_op_movswl_EAX_AX();
2759
        else
2760
            gen_op_movsbw_AX_AL();
2761
        break;
2762
    case 0x99: /* CDQ/CWD */
2763
#ifdef TARGET_X86_64
2764
        if (dflag == 2) {
2765
            gen_op_movsqo_RDX_RAX();
2766
        } else
2767
#endif
2768
        if (dflag == 1)
2769
            gen_op_movslq_EDX_EAX();
2770
        else
2771
            gen_op_movswl_DX_AX();
2772
        break;
2773
    case 0x1af: /* imul Gv, Ev */
2774
    case 0x69: /* imul Gv, Ev, I */
2775
    case 0x6b:
2776
        ot = dflag + OT_WORD;
2777
        modrm = ldub_code(s->pc++);
2778
        reg = ((modrm >> 3) & 7) | rex_r;
2779
        if (b == 0x69)
2780
            s->rip_offset = insn_const_size(ot);
2781
        else if (b == 0x6b)
2782
            s->rip_offset = 1;
2783
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
2784
        if (b == 0x69) {
2785
            val = insn_get(s, ot);
2786
            gen_op_movl_T1_im(val);
2787
        } else if (b == 0x6b) {
2788
            val = (int8_t)insn_get(s, OT_BYTE);
2789
            gen_op_movl_T1_im(val);
2790
        } else {
2791
            gen_op_mov_TN_reg[ot][1][reg]();
2792
        }
2793

    
2794
#ifdef TARGET_X86_64
2795
        if (ot == OT_QUAD) {
2796
            gen_op_imulq_T0_T1();
2797
        } else
2798
#endif
2799
        if (ot == OT_LONG) {
2800
            gen_op_imull_T0_T1();
2801
        } else {
2802
            gen_op_imulw_T0_T1();
2803
        }
2804
        gen_op_mov_reg_T0[ot][reg]();
2805
        s->cc_op = CC_OP_MULB + ot;
2806
        break;
2807
    case 0x1c0:
2808
    case 0x1c1: /* xadd Ev, Gv */
2809
        if ((b & 1) == 0)
2810
            ot = OT_BYTE;
2811
        else
2812
            ot = dflag + OT_WORD;
2813
        modrm = ldub_code(s->pc++);
2814
        reg = ((modrm >> 3) & 7) | rex_r;
2815
        mod = (modrm >> 6) & 3;
2816
        if (mod == 3) {
2817
            rm = (modrm & 7) | REX_B(s);
2818
            gen_op_mov_TN_reg[ot][0][reg]();
2819
            gen_op_mov_TN_reg[ot][1][rm]();
2820
            gen_op_addl_T0_T1();
2821
            gen_op_mov_reg_T1[ot][reg]();
2822
            gen_op_mov_reg_T0[ot][rm]();
2823
        } else {
2824
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2825
            gen_op_mov_TN_reg[ot][0][reg]();
2826
            gen_op_ld_T1_A0[ot + s->mem_index]();
2827
            gen_op_addl_T0_T1();
2828
            gen_op_st_T0_A0[ot + s->mem_index]();
2829
            gen_op_mov_reg_T1[ot][reg]();
2830
        }
2831
        gen_op_update2_cc();
2832
        s->cc_op = CC_OP_ADDB + ot;
2833
        break;
2834
    case 0x1b0:
2835
    case 0x1b1: /* cmpxchg Ev, Gv */
2836
        if ((b & 1) == 0)
2837
            ot = OT_BYTE;
2838
        else
2839
            ot = dflag + OT_WORD;
2840
        modrm = ldub_code(s->pc++);
2841
        reg = ((modrm >> 3) & 7) | rex_r;
2842
        mod = (modrm >> 6) & 3;
2843
        gen_op_mov_TN_reg[ot][1][reg]();
2844
        if (mod == 3) {
2845
            rm = (modrm & 7) | REX_B(s);
2846
            gen_op_mov_TN_reg[ot][0][rm]();
2847
            gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
2848
            gen_op_mov_reg_T0[ot][rm]();
2849
        } else {
2850
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2851
            gen_op_ld_T0_A0[ot + s->mem_index]();
2852
            gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
2853
        }
2854
        s->cc_op = CC_OP_SUBB + ot;
2855
        break;
2856
    case 0x1c7: /* cmpxchg8b */
2857
        modrm = ldub_code(s->pc++);
2858
        mod = (modrm >> 6) & 3;
2859
        if (mod == 3)
2860
            goto illegal_op;
2861
        if (s->cc_op != CC_OP_DYNAMIC)
2862
            gen_op_set_cc_op(s->cc_op);
2863
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2864
        gen_op_cmpxchg8b();
2865
        s->cc_op = CC_OP_EFLAGS;
2866
        break;
2867
        
2868
        /**************************/
2869
        /* push/pop */
2870
    case 0x50 ... 0x57: /* push */
2871
        gen_op_mov_TN_reg[OT_LONG][0][(b & 7) | REX_B(s)]();
2872
        gen_push_T0(s);
2873
        break;
2874
    case 0x58 ... 0x5f: /* pop */
2875
        if (CODE64(s)) {
2876
            ot = dflag ? OT_QUAD : OT_WORD;
2877
        } else {
2878
            ot = dflag + OT_WORD;
2879
        }
2880
        gen_pop_T0(s);
2881
        /* NOTE: order is important for pop %sp */
2882
        gen_pop_update(s);
2883
        gen_op_mov_reg_T0[ot][(b & 7) | REX_B(s)]();
2884
        break;
2885
    case 0x60: /* pusha */
2886
        if (CODE64(s))
2887
            goto illegal_op;
2888
        gen_pusha(s);
2889
        break;
2890
    case 0x61: /* popa */
2891
        if (CODE64(s))
2892
            goto illegal_op;
2893
        gen_popa(s);
2894
        break;
2895
    case 0x68: /* push Iv */
2896
    case 0x6a:
2897
        if (CODE64(s)) {
2898
            ot = dflag ? OT_QUAD : OT_WORD;
2899
        } else {
2900
            ot = dflag + OT_WORD;
2901
        }
2902
        if (b == 0x68)
2903
            val = insn_get(s, ot);
2904
        else
2905
            val = (int8_t)insn_get(s, OT_BYTE);
2906
        gen_op_movl_T0_im(val);
2907
        gen_push_T0(s);
2908
        break;
2909
    case 0x8f: /* pop Ev */
2910
        if (CODE64(s)) {
2911
            ot = dflag ? OT_QUAD : OT_WORD;
2912
        } else {
2913
            ot = dflag + OT_WORD;
2914
        }
2915
        modrm = ldub_code(s->pc++);
2916
        mod = (modrm >> 6) & 3;
2917
        gen_pop_T0(s);
2918
        if (mod == 3) {
2919
            /* NOTE: order is important for pop %sp */
2920
            gen_pop_update(s);
2921
            rm = (modrm & 7) | REX_B(s);
2922
            gen_op_mov_reg_T0[ot][rm]();
2923
        } else {
2924
            /* NOTE: order is important too for MMU exceptions */
2925
            s->popl_esp_hack = 1 << ot;
2926
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
2927
            s->popl_esp_hack = 0;
2928
            gen_pop_update(s);
2929
        }
2930
        break;
2931
    case 0xc8: /* enter */
2932
        {
2933
            /* XXX: long mode support */
2934
            int level;
2935
            val = lduw_code(s->pc);
2936
            s->pc += 2;
2937
            level = ldub_code(s->pc++);
2938
            gen_enter(s, val, level);
2939
        }
2940
        break;
2941
    case 0xc9: /* leave */
2942
        /* XXX: exception not precise (ESP is updated before potential exception) */
2943
        /* XXX: may be invalid for 16 bit in long mode */
2944
        if (CODE64(s)) {
2945
            gen_op_mov_TN_reg[OT_QUAD][0][R_EBP]();
2946
            gen_op_mov_reg_T0[OT_QUAD][R_ESP]();
2947
        } else if (s->ss32) {
2948
            gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
2949
            gen_op_mov_reg_T0[OT_LONG][R_ESP]();
2950
        } else {
2951
            gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
2952
            gen_op_mov_reg_T0[OT_WORD][R_ESP]();
2953
        }
2954
        gen_pop_T0(s);
2955
        if (CODE64(s)) {
2956
            ot = dflag ? OT_QUAD : OT_WORD;
2957
        } else {
2958
            ot = dflag + OT_WORD;
2959
        }
2960
        gen_op_mov_reg_T0[ot][R_EBP]();
2961
        gen_pop_update(s);
2962
        break;
2963
    case 0x06: /* push es */
2964
    case 0x0e: /* push cs */
2965
    case 0x16: /* push ss */
2966
    case 0x1e: /* push ds */
2967
        if (CODE64(s))
2968
            goto illegal_op;
2969
        gen_op_movl_T0_seg(b >> 3);
2970
        gen_push_T0(s);
2971
        break;
2972
    case 0x1a0: /* push fs */
2973
    case 0x1a8: /* push gs */
2974
        gen_op_movl_T0_seg((b >> 3) & 7);
2975
        gen_push_T0(s);
2976
        break;
2977
    case 0x07: /* pop es */
2978
    case 0x17: /* pop ss */
2979
    case 0x1f: /* pop ds */
2980
        if (CODE64(s))
2981
            goto illegal_op;
2982
        reg = b >> 3;
2983
        gen_pop_T0(s);
2984
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
2985
        gen_pop_update(s);
2986
        if (reg == R_SS) {
2987
            /* if reg == SS, inhibit interrupts/trace. */
2988
            /* If several instructions disable interrupts, only the
2989
               _first_ does it */
2990
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
2991
                gen_op_set_inhibit_irq();
2992
            s->tf = 0;
2993
        }
2994
        if (s->is_jmp) {
2995
            gen_jmp_im(s->pc - s->cs_base);
2996
            gen_eob(s);
2997
        }
2998
        break;
2999
    case 0x1a1: /* pop fs */
3000
    case 0x1a9: /* pop gs */
3001
        gen_pop_T0(s);
3002
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
3003
        gen_pop_update(s);
3004
        if (s->is_jmp) {
3005
            gen_jmp_im(s->pc - s->cs_base);
3006
            gen_eob(s);
3007
        }
3008
        break;
3009

    
3010
        /**************************/
3011
        /* mov */
3012
    case 0x88:
3013
    case 0x89: /* mov Gv, Ev */
3014
        if ((b & 1) == 0)
3015
            ot = OT_BYTE;
3016
        else
3017
            ot = dflag + OT_WORD;
3018
        modrm = ldub_code(s->pc++);
3019
        reg = ((modrm >> 3) & 7) | rex_r;
3020
        
3021
        /* generate a generic store */
3022
        gen_ldst_modrm(s, modrm, ot, reg, 1);
3023
        break;
3024
    case 0xc6:
3025
    case 0xc7: /* mov Ev, Iv */
3026
        if ((b & 1) == 0)
3027
            ot = OT_BYTE;
3028
        else
3029
            ot = dflag + OT_WORD;
3030
        modrm = ldub_code(s->pc++);
3031
        mod = (modrm >> 6) & 3;
3032
        if (mod != 3) {
3033
            s->rip_offset = insn_const_size(ot);
3034
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3035
        }
3036
        val = insn_get(s, ot);
3037
        gen_op_movl_T0_im(val);
3038
        if (mod != 3)
3039
            gen_op_st_T0_A0[ot + s->mem_index]();
3040
        else
3041
            gen_op_mov_reg_T0[ot][(modrm & 7) | REX_B(s)]();
3042
        break;
3043
    case 0x8a:
3044
    case 0x8b: /* mov Ev, Gv */
3045
        if ((b & 1) == 0)
3046
            ot = OT_BYTE;
3047
        else
3048
            ot = OT_WORD + dflag;
3049
        modrm = ldub_code(s->pc++);
3050
        reg = ((modrm >> 3) & 7) | rex_r;
3051
        
3052
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3053
        gen_op_mov_reg_T0[ot][reg]();
3054
        break;
3055
    case 0x8e: /* mov seg, Gv */
3056
        modrm = ldub_code(s->pc++);
3057
        reg = (modrm >> 3) & 7;
3058
        if (reg >= 6 || reg == R_CS)
3059
            goto illegal_op;
3060
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3061
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
3062
        if (reg == R_SS) {
3063
            /* if reg == SS, inhibit interrupts/trace */
3064
            /* If several instructions disable interrupts, only the
3065
               _first_ does it */
3066
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
3067
                gen_op_set_inhibit_irq();
3068
            s->tf = 0;
3069
        }
3070
        if (s->is_jmp) {
3071
            gen_jmp_im(s->pc - s->cs_base);
3072
            gen_eob(s);
3073
        }
3074
        break;
3075
    case 0x8c: /* mov Gv, seg */
3076
        modrm = ldub_code(s->pc++);
3077
        reg = (modrm >> 3) & 7;
3078
        mod = (modrm >> 6) & 3;
3079
        if (reg >= 6)
3080
            goto illegal_op;
3081
        gen_op_movl_T0_seg(reg);
3082
        if (mod == 3)
3083
            ot = OT_WORD + dflag;
3084
        else
3085
            ot = OT_WORD;
3086
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3087
        break;
3088

    
3089
    case 0x1b6: /* movzbS Gv, Eb */
3090
    case 0x1b7: /* movzwS Gv, Eb */
3091
    case 0x1be: /* movsbS Gv, Eb */
3092
    case 0x1bf: /* movswS Gv, Eb */
3093
        {
3094
            int d_ot;
3095
            /* d_ot is the size of destination */
3096
            d_ot = dflag + OT_WORD;
3097
            /* ot is the size of source */
3098
            ot = (b & 1) + OT_BYTE;
3099
            modrm = ldub_code(s->pc++);
3100
            reg = ((modrm >> 3) & 7) | rex_r;
3101
            mod = (modrm >> 6) & 3;
3102
            rm = (modrm & 7) | REX_B(s);
3103
            
3104
            if (mod == 3) {
3105
                gen_op_mov_TN_reg[ot][0][rm]();
3106
                switch(ot | (b & 8)) {
3107
                case OT_BYTE:
3108
                    gen_op_movzbl_T0_T0();
3109
                    break;
3110
                case OT_BYTE | 8:
3111
                    gen_op_movsbl_T0_T0();
3112
                    break;
3113
                case OT_WORD:
3114
                    gen_op_movzwl_T0_T0();
3115
                    break;
3116
                default:
3117
                case OT_WORD | 8:
3118
                    gen_op_movswl_T0_T0();
3119
                    break;
3120
                }
3121
                gen_op_mov_reg_T0[d_ot][reg]();
3122
            } else {
3123
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3124
                if (b & 8) {
3125
                    gen_op_lds_T0_A0[ot + s->mem_index]();
3126
                } else {
3127
                    gen_op_ldu_T0_A0[ot + s->mem_index]();
3128
                }
3129
                gen_op_mov_reg_T0[d_ot][reg]();
3130
            }
3131
        }
3132
        break;
3133

    
3134
    case 0x8d: /* lea */
3135
        ot = dflag + OT_WORD;
3136
        modrm = ldub_code(s->pc++);
3137
        mod = (modrm >> 6) & 3;
3138
        if (mod == 3)
3139
            goto illegal_op;
3140
        reg = ((modrm >> 3) & 7) | rex_r;
3141
        /* we must ensure that no segment is added */
3142
        s->override = -1;
3143
        val = s->addseg;
3144
        s->addseg = 0;
3145
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3146
        s->addseg = val;
3147
        gen_op_mov_reg_A0[ot - OT_WORD][reg]();
3148
        break;
3149
        
3150
    case 0xa0: /* mov EAX, Ov */
3151
    case 0xa1:
3152
    case 0xa2: /* mov Ov, EAX */
3153
    case 0xa3:
3154
        {
3155
            target_ulong offset_addr;
3156

    
3157
            if ((b & 1) == 0)
3158
                ot = OT_BYTE;
3159
            else
3160
                ot = dflag + OT_WORD;
3161
#ifdef TARGET_X86_64
3162
            if (CODE64(s)) {
3163
                offset_addr = ldq_code(s->pc);
3164
                s->pc += 8;
3165
                if (offset_addr == (int32_t)offset_addr)
3166
                    gen_op_movq_A0_im(offset_addr);
3167
                else
3168
                    gen_op_movq_A0_im64(offset_addr >> 32, offset_addr);
3169
            } else 
3170
#endif
3171
            {
3172
                if (s->aflag) {
3173
                    offset_addr = insn_get(s, OT_LONG);
3174
                } else {
3175
                    offset_addr = insn_get(s, OT_WORD);
3176
                }
3177
                gen_op_movl_A0_im(offset_addr);
3178
            }
3179
            /* handle override */
3180
            {
3181
                int override, must_add_seg;
3182
                must_add_seg = s->addseg;
3183
                if (s->override >= 0) {
3184
                    override = s->override;
3185
                    must_add_seg = 1;
3186
                } else {
3187
                    override = R_DS;
3188
                }
3189
                if (must_add_seg) {
3190
                    gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
3191
                }
3192
            }
3193
            if ((b & 2) == 0) {
3194
                gen_op_ld_T0_A0[ot + s->mem_index]();
3195
                gen_op_mov_reg_T0[ot][R_EAX]();
3196
            } else {
3197
                gen_op_mov_TN_reg[ot][0][R_EAX]();
3198
                gen_op_st_T0_A0[ot + s->mem_index]();
3199
            }
3200
        }
3201
        break;
3202
    case 0xd7: /* xlat */
3203
#ifdef TARGET_X86_64
3204
        if (CODE64(s)) {
3205
            gen_op_movq_A0_reg[R_EBX]();
3206
            gen_op_addq_A0_AL();
3207
        } else 
3208
#endif
3209
        {
3210
            gen_op_movl_A0_reg[R_EBX]();
3211
            gen_op_addl_A0_AL();
3212
            if (s->aflag == 0)
3213
                gen_op_andl_A0_ffff();
3214
        }
3215
        /* handle override */
3216
        {
3217
            int override, must_add_seg;
3218
            must_add_seg = s->addseg;
3219
            override = R_DS;
3220
            if (s->override >= 0) {
3221
                override = s->override;
3222
                must_add_seg = 1;
3223
            } else {
3224
                override = R_DS;
3225
            }
3226
            if (must_add_seg) {
3227
                gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
3228
            }
3229
        }
3230
        gen_op_ldu_T0_A0[OT_BYTE + s->mem_index]();
3231
        gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
3232
        break;
3233
    case 0xb0 ... 0xb7: /* mov R, Ib */
3234
        val = insn_get(s, OT_BYTE);
3235
        gen_op_movl_T0_im(val);
3236
        gen_op_mov_reg_T0[OT_BYTE][(b & 7) | REX_B(s)]();
3237
        break;
3238
    case 0xb8 ... 0xbf: /* mov R, Iv */
3239
#ifdef TARGET_X86_64
3240
        if (dflag == 2) {
3241
            uint64_t tmp;
3242
            /* 64 bit case */
3243
            tmp = ldq_code(s->pc);
3244
            s->pc += 8;
3245
            reg = (b & 7) | REX_B(s);
3246
            gen_movtl_T0_im(tmp);
3247
            gen_op_mov_reg_T0[OT_QUAD][reg]();
3248
        } else 
3249
#endif
3250
        {
3251
            ot = dflag ? OT_LONG : OT_WORD;
3252
            val = insn_get(s, ot);
3253
            reg = (b & 7) | REX_B(s);
3254
            gen_op_movl_T0_im(val);
3255
            gen_op_mov_reg_T0[ot][reg]();
3256
        }
3257
        break;
3258

    
3259
    case 0x91 ... 0x97: /* xchg R, EAX */
3260
        ot = dflag + OT_WORD;
3261
        reg = (b & 7) | REX_B(s);
3262
        rm = R_EAX;
3263
        goto do_xchg_reg;
3264
    case 0x86:
3265
    case 0x87: /* xchg Ev, Gv */
3266
        if ((b & 1) == 0)
3267
            ot = OT_BYTE;
3268
        else
3269
            ot = dflag + OT_WORD;
3270
        modrm = ldub_code(s->pc++);
3271
        reg = ((modrm >> 3) & 7) | rex_r;
3272
        mod = (modrm >> 6) & 3;
3273
        if (mod == 3) {
3274
            rm = (modrm & 7) | REX_B(s);
3275
        do_xchg_reg:
3276
            gen_op_mov_TN_reg[ot][0][reg]();
3277
            gen_op_mov_TN_reg[ot][1][rm]();
3278
            gen_op_mov_reg_T0[ot][rm]();
3279
            gen_op_mov_reg_T1[ot][reg]();
3280
        } else {
3281
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3282
            gen_op_mov_TN_reg[ot][0][reg]();
3283
            /* for xchg, lock is implicit */
3284
            if (!(prefixes & PREFIX_LOCK))
3285
                gen_op_lock();
3286
            gen_op_ld_T1_A0[ot + s->mem_index]();
3287
            gen_op_st_T0_A0[ot + s->mem_index]();
3288
            if (!(prefixes & PREFIX_LOCK))
3289
                gen_op_unlock();
3290
            gen_op_mov_reg_T1[ot][reg]();
3291
        }
3292
        break;
3293
    case 0xc4: /* les Gv */
3294
        if (CODE64(s))
3295
            goto illegal_op;
3296
        op = R_ES;
3297
        goto do_lxx;
3298
    case 0xc5: /* lds Gv */
3299
        if (CODE64(s))
3300
            goto illegal_op;
3301
        op = R_DS;
3302
        goto do_lxx;
3303
    case 0x1b2: /* lss Gv */
3304
        op = R_SS;
3305
        goto do_lxx;
3306
    case 0x1b4: /* lfs Gv */
3307
        op = R_FS;
3308
        goto do_lxx;
3309
    case 0x1b5: /* lgs Gv */
3310
        op = R_GS;
3311
    do_lxx:
3312
        ot = dflag ? OT_LONG : OT_WORD;
3313
        modrm = ldub_code(s->pc++);
3314
        reg = ((modrm >> 3) & 7) | rex_r;
3315
        mod = (modrm >> 6) & 3;
3316
        if (mod == 3)
3317
            goto illegal_op;
3318
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3319
        gen_op_ld_T1_A0[ot + s->mem_index]();
3320
        gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
3321
        /* load the segment first to handle exceptions properly */
3322
        gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3323
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
3324
        /* then put the data */
3325
        gen_op_mov_reg_T1[ot][reg]();
3326
        if (s->is_jmp) {
3327
            gen_jmp_im(s->pc - s->cs_base);
3328
            gen_eob(s);
3329
        }
3330
        break;
3331
        
3332
        /************************/
3333
        /* shifts */
3334
    case 0xc0:
3335
    case 0xc1:
3336
        /* shift Ev,Ib */
3337
        shift = 2;
3338
    grp2:
3339
        {
3340
            if ((b & 1) == 0)
3341
                ot = OT_BYTE;
3342
            else
3343
                ot = dflag + OT_WORD;
3344
            
3345
            modrm = ldub_code(s->pc++);
3346
            mod = (modrm >> 6) & 3;
3347
            op = (modrm >> 3) & 7;
3348
            
3349
            if (mod != 3) {
3350
                if (shift == 2) {
3351
                    s->rip_offset = 1;
3352
                }
3353
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3354
                opreg = OR_TMP0;
3355
            } else {
3356
                opreg = (modrm & 7) | REX_B(s);
3357
            }
3358

    
3359
            /* simpler op */
3360
            if (shift == 0) {
3361
                gen_shift(s, op, ot, opreg, OR_ECX);
3362
            } else {
3363
                if (shift == 2) {
3364
                    shift = ldub_code(s->pc++);
3365
                }
3366
                gen_shifti(s, op, ot, opreg, shift);
3367
            }
3368
        }
3369
        break;
3370
    case 0xd0:
3371
    case 0xd1:
3372
        /* shift Ev,1 */
3373
        shift = 1;
3374
        goto grp2;
3375
    case 0xd2:
3376
    case 0xd3:
3377
        /* shift Ev,cl */
3378
        shift = 0;
3379
        goto grp2;
3380

    
3381
    case 0x1a4: /* shld imm */
3382
        op = 0;
3383
        shift = 1;
3384
        goto do_shiftd;
3385
    case 0x1a5: /* shld cl */
3386
        op = 0;
3387
        shift = 0;
3388
        goto do_shiftd;
3389
    case 0x1ac: /* shrd imm */
3390
        op = 1;
3391
        shift = 1;
3392
        goto do_shiftd;
3393
    case 0x1ad: /* shrd cl */
3394
        op = 1;
3395
        shift = 0;
3396
    do_shiftd:
3397
        ot = dflag + OT_WORD;
3398
        modrm = ldub_code(s->pc++);
3399
        mod = (modrm >> 6) & 3;
3400
        rm = (modrm & 7) | REX_B(s);
3401
        reg = ((modrm >> 3) & 7) | rex_r;
3402
        
3403
        if (mod != 3) {
3404
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3405
            gen_op_ld_T0_A0[ot + s->mem_index]();
3406
        } else {
3407
            gen_op_mov_TN_reg[ot][0][rm]();
3408
        }
3409
        gen_op_mov_TN_reg[ot][1][reg]();
3410
        
3411
        if (shift) {
3412
            val = ldub_code(s->pc++);
3413
            if (ot == OT_QUAD)
3414
                val &= 0x3f;
3415
            else
3416
                val &= 0x1f;
3417
            if (val) {
3418
                if (mod == 3)
3419
                    gen_op_shiftd_T0_T1_im_cc[ot][op](val);
3420
                else
3421
                    gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
3422
                if (op == 0 && ot != OT_WORD)
3423
                    s->cc_op = CC_OP_SHLB + ot;
3424
                else
3425
                    s->cc_op = CC_OP_SARB + ot;
3426
            }
3427
        } else {
3428
            if (s->cc_op != CC_OP_DYNAMIC)
3429
                gen_op_set_cc_op(s->cc_op);
3430
            if (mod == 3)
3431
                gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
3432
            else
3433
                gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
3434
            s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
3435
        }
3436
        if (mod == 3) {
3437
            gen_op_mov_reg_T0[ot][rm]();
3438
        }
3439
        break;
3440

    
3441
        /************************/
3442
        /* floats */
3443
    case 0xd8 ... 0xdf: 
3444
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
3445
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
3446
            /* XXX: what to do if illegal op ? */
3447
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
3448
            break;
3449
        }
3450
        modrm = ldub_code(s->pc++);
3451
        mod = (modrm >> 6) & 3;
3452
        rm = modrm & 7;
3453
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
3454
        if (mod != 3) {
3455
            /* memory op */
3456
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3457
            switch(op) {
3458
            case 0x00 ... 0x07: /* fxxxs */
3459
            case 0x10 ... 0x17: /* fixxxl */
3460
            case 0x20 ... 0x27: /* fxxxl */
3461
            case 0x30 ... 0x37: /* fixxx */
3462
                {
3463
                    int op1;
3464
                    op1 = op & 7;
3465

    
3466
                    switch(op >> 4) {
3467
                    case 0:
3468
                        gen_op_flds_FT0_A0();
3469
                        break;
3470
                    case 1:
3471
                        gen_op_fildl_FT0_A0();
3472
                        break;
3473
                    case 2:
3474
                        gen_op_fldl_FT0_A0();
3475
                        break;
3476
                    case 3:
3477
                    default:
3478
                        gen_op_fild_FT0_A0();
3479
                        break;
3480
                    }
3481
                    
3482
                    gen_op_fp_arith_ST0_FT0[op1]();
3483
                    if (op1 == 3) {
3484
                        /* fcomp needs pop */
3485
                        gen_op_fpop();
3486
                    }
3487
                }
3488
                break;
3489
            case 0x08: /* flds */
3490
            case 0x0a: /* fsts */
3491
            case 0x0b: /* fstps */
3492
            case 0x18: /* fildl */
3493
            case 0x1a: /* fistl */
3494
            case 0x1b: /* fistpl */
3495
            case 0x28: /* fldl */
3496
            case 0x2a: /* fstl */
3497
            case 0x2b: /* fstpl */
3498
            case 0x38: /* filds */
3499
            case 0x3a: /* fists */
3500
            case 0x3b: /* fistps */
3501
                
3502
                switch(op & 7) {
3503
                case 0:
3504
                    switch(op >> 4) {
3505
                    case 0:
3506
                        gen_op_flds_ST0_A0();
3507
                        break;
3508
                    case 1:
3509
                        gen_op_fildl_ST0_A0();
3510
                        break;
3511
                    case 2:
3512
                        gen_op_fldl_ST0_A0();
3513
                        break;
3514
                    case 3:
3515
                    default:
3516
                        gen_op_fild_ST0_A0();
3517
                        break;
3518
                    }
3519
                    break;
3520
                default:
3521
                    switch(op >> 4) {
3522
                    case 0:
3523
                        gen_op_fsts_ST0_A0();
3524
                        break;
3525
                    case 1:
3526
                        gen_op_fistl_ST0_A0();
3527
                        break;
3528
                    case 2:
3529
                        gen_op_fstl_ST0_A0();
3530
                        break;
3531
                    case 3:
3532
                    default:
3533
                        gen_op_fist_ST0_A0();
3534
                        break;
3535
                    }
3536
                    if ((op & 7) == 3)
3537
                        gen_op_fpop();
3538
                    break;
3539
                }
3540
                break;
3541
            case 0x0c: /* fldenv mem */
3542
                gen_op_fldenv_A0(s->dflag);
3543
                break;
3544
            case 0x0d: /* fldcw mem */
3545
                gen_op_fldcw_A0();
3546
                break;
3547
            case 0x0e: /* fnstenv mem */
3548
                gen_op_fnstenv_A0(s->dflag);
3549
                break;
3550
            case 0x0f: /* fnstcw mem */
3551
                gen_op_fnstcw_A0();
3552
                break;
3553
            case 0x1d: /* fldt mem */
3554
                gen_op_fldt_ST0_A0();
3555
                break;
3556
            case 0x1f: /* fstpt mem */
3557
                gen_op_fstt_ST0_A0();
3558
                gen_op_fpop();
3559
                break;
3560
            case 0x2c: /* frstor mem */
3561
                gen_op_frstor_A0(s->dflag);
3562
                break;
3563
            case 0x2e: /* fnsave mem */
3564
                gen_op_fnsave_A0(s->dflag);
3565
                break;
3566
            case 0x2f: /* fnstsw mem */
3567
                gen_op_fnstsw_A0();
3568
                break;
3569
            case 0x3c: /* fbld */
3570
                gen_op_fbld_ST0_A0();
3571
                break;
3572
            case 0x3e: /* fbstp */
3573
                gen_op_fbst_ST0_A0();
3574
                gen_op_fpop();
3575
                break;
3576
            case 0x3d: /* fildll */
3577
                gen_op_fildll_ST0_A0();
3578
                break;
3579
            case 0x3f: /* fistpll */
3580
                gen_op_fistll_ST0_A0();
3581
                gen_op_fpop();
3582
                break;
3583
            default:
3584
                goto illegal_op;
3585
            }
3586
        } else {
3587
            /* register float ops */
3588
            opreg = rm;
3589

    
3590
            switch(op) {
3591
            case 0x08: /* fld sti */
3592
                gen_op_fpush();
3593
                gen_op_fmov_ST0_STN((opreg + 1) & 7);
3594
                break;
3595
            case 0x09: /* fxchg sti */
3596
            case 0x29: /* fxchg4 sti, undocumented op */
3597
            case 0x39: /* fxchg7 sti, undocumented op */
3598
                gen_op_fxchg_ST0_STN(opreg);
3599
                break;
3600
            case 0x0a: /* grp d9/2 */
3601
                switch(rm) {
3602
                case 0: /* fnop */
3603
                    /* check exceptions (FreeBSD FPU probe) */
3604
                    if (s->cc_op != CC_OP_DYNAMIC)
3605
                        gen_op_set_cc_op(s->cc_op);
3606
                    gen_jmp_im(pc_start - s->cs_base);
3607
                    gen_op_fwait();
3608
                    break;
3609
                default:
3610
                    goto illegal_op;
3611
                }
3612
                break;
3613
            case 0x0c: /* grp d9/4 */
3614
                switch(rm) {
3615
                case 0: /* fchs */
3616
                    gen_op_fchs_ST0();
3617
                    break;
3618
                case 1: /* fabs */
3619
                    gen_op_fabs_ST0();
3620
                    break;
3621
                case 4: /* ftst */
3622
                    gen_op_fldz_FT0();
3623
                    gen_op_fcom_ST0_FT0();
3624
                    break;
3625
                case 5: /* fxam */
3626
                    gen_op_fxam_ST0();
3627
                    break;
3628
                default:
3629
                    goto illegal_op;
3630
                }
3631
                break;
3632
            case 0x0d: /* grp d9/5 */
3633
                {
3634
                    switch(rm) {
3635
                    case 0:
3636
                        gen_op_fpush();
3637
                        gen_op_fld1_ST0();
3638
                        break;
3639
                    case 1:
3640
                        gen_op_fpush();
3641
                        gen_op_fldl2t_ST0();
3642
                        break;
3643
                    case 2:
3644
                        gen_op_fpush();
3645
                        gen_op_fldl2e_ST0();
3646
                        break;
3647
                    case 3:
3648
                        gen_op_fpush();
3649
                        gen_op_fldpi_ST0();
3650
                        break;
3651
                    case 4:
3652
                        gen_op_fpush();
3653
                        gen_op_fldlg2_ST0();
3654
                        break;
3655
                    case 5:
3656
                        gen_op_fpush();
3657
                        gen_op_fldln2_ST0();
3658
                        break;
3659
                    case 6:
3660
                        gen_op_fpush();
3661
                        gen_op_fldz_ST0();
3662
                        break;
3663
                    default:
3664
                        goto illegal_op;
3665
                    }
3666
                }
3667
                break;
3668
            case 0x0e: /* grp d9/6 */
3669
                switch(rm) {
3670
                case 0: /* f2xm1 */
3671
                    gen_op_f2xm1();
3672
                    break;
3673
                case 1: /* fyl2x */
3674
                    gen_op_fyl2x();
3675
                    break;
3676
                case 2: /* fptan */
3677
                    gen_op_fptan();
3678
                    break;
3679
                case 3: /* fpatan */
3680
                    gen_op_fpatan();
3681
                    break;
3682
                case 4: /* fxtract */
3683
                    gen_op_fxtract();
3684
                    break;
3685
                case 5: /* fprem1 */
3686
                    gen_op_fprem1();
3687
                    break;
3688
                case 6: /* fdecstp */
3689
                    gen_op_fdecstp();
3690
                    break;
3691
                default:
3692
                case 7: /* fincstp */
3693
                    gen_op_fincstp();
3694
                    break;
3695
                }
3696
                break;
3697
            case 0x0f: /* grp d9/7 */
3698
                switch(rm) {
3699
                case 0: /* fprem */
3700
                    gen_op_fprem();
3701
                    break;
3702
                case 1: /* fyl2xp1 */
3703
                    gen_op_fyl2xp1();
3704
                    break;
3705
                case 2: /* fsqrt */
3706
                    gen_op_fsqrt();
3707
                    break;
3708
                case 3: /* fsincos */
3709
                    gen_op_fsincos();
3710
                    break;
3711
                case 5: /* fscale */
3712
                    gen_op_fscale();
3713
                    break;
3714
                case 4: /* frndint */
3715
                    gen_op_frndint();
3716
                    break;
3717
                case 6: /* fsin */
3718
                    gen_op_fsin();
3719
                    break;
3720
                default:
3721
                case 7: /* fcos */
3722
                    gen_op_fcos();
3723
                    break;
3724
                }
3725
                break;
3726
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
3727
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
3728
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
3729
                {
3730
                    int op1;
3731
                    
3732
                    op1 = op & 7;
3733
                    if (op >= 0x20) {
3734
                        gen_op_fp_arith_STN_ST0[op1](opreg);
3735
                        if (op >= 0x30)
3736
                            gen_op_fpop();
3737
                    } else {
3738
                        gen_op_fmov_FT0_STN(opreg);
3739
                        gen_op_fp_arith_ST0_FT0[op1]();
3740
                    }
3741
                }
3742
                break;
3743
            case 0x02: /* fcom */
3744
            case 0x22: /* fcom2, undocumented op */
3745
                gen_op_fmov_FT0_STN(opreg);
3746
                gen_op_fcom_ST0_FT0();
3747
                break;
3748
            case 0x03: /* fcomp */
3749
            case 0x23: /* fcomp3, undocumented op */
3750
            case 0x32: /* fcomp5, undocumented op */
3751
                gen_op_fmov_FT0_STN(opreg);
3752
                gen_op_fcom_ST0_FT0();
3753
                gen_op_fpop();
3754
                break;
3755
            case 0x15: /* da/5 */
3756
                switch(rm) {
3757
                case 1: /* fucompp */
3758
                    gen_op_fmov_FT0_STN(1);
3759
                    gen_op_fucom_ST0_FT0();
3760
                    gen_op_fpop();
3761
                    gen_op_fpop();
3762
                    break;
3763
                default:
3764
                    goto illegal_op;
3765
                }
3766
                break;
3767
            case 0x1c:
3768
                switch(rm) {
3769
                case 0: /* feni (287 only, just do nop here) */
3770
                    break;
3771
                case 1: /* fdisi (287 only, just do nop here) */
3772
                    break;
3773
                case 2: /* fclex */
3774
                    gen_op_fclex();
3775
                    break;
3776
                case 3: /* fninit */
3777
                    gen_op_fninit();
3778
                    break;
3779
                case 4: /* fsetpm (287 only, just do nop here) */
3780
                    break;
3781
                default:
3782
                    goto illegal_op;
3783
                }
3784
                break;
3785
            case 0x1d: /* fucomi */
3786
                if (s->cc_op != CC_OP_DYNAMIC)
3787
                    gen_op_set_cc_op(s->cc_op);
3788
                gen_op_fmov_FT0_STN(opreg);
3789
                gen_op_fucomi_ST0_FT0();
3790
                s->cc_op = CC_OP_EFLAGS;
3791
                break;
3792
            case 0x1e: /* fcomi */
3793
                if (s->cc_op != CC_OP_DYNAMIC)
3794
                    gen_op_set_cc_op(s->cc_op);
3795
                gen_op_fmov_FT0_STN(opreg);
3796
                gen_op_fcomi_ST0_FT0();
3797
                s->cc_op = CC_OP_EFLAGS;
3798
                break;
3799
            case 0x28: /* ffree sti */
3800
                gen_op_ffree_STN(opreg);
3801
                break; 
3802
            case 0x2a: /* fst sti */
3803
                gen_op_fmov_STN_ST0(opreg);
3804
                break;
3805
            case 0x2b: /* fstp sti */
3806
            case 0x0b: /* fstp1 sti, undocumented op */
3807
            case 0x3a: /* fstp8 sti, undocumented op */
3808
            case 0x3b: /* fstp9 sti, undocumented op */
3809
                gen_op_fmov_STN_ST0(opreg);
3810
                gen_op_fpop();
3811
                break;
3812
            case 0x2c: /* fucom st(i) */
3813
                gen_op_fmov_FT0_STN(opreg);
3814
                gen_op_fucom_ST0_FT0();
3815
                break;
3816
            case 0x2d: /* fucomp st(i) */
3817
                gen_op_fmov_FT0_STN(opreg);
3818
                gen_op_fucom_ST0_FT0();
3819
                gen_op_fpop();
3820
                break;
3821
            case 0x33: /* de/3 */
3822
                switch(rm) {
3823
                case 1: /* fcompp */
3824
                    gen_op_fmov_FT0_STN(1);
3825
                    gen_op_fcom_ST0_FT0();
3826
                    gen_op_fpop();
3827
                    gen_op_fpop();
3828
                    break;
3829
                default:
3830
                    goto illegal_op;
3831
                }
3832
                break;
3833
            case 0x38: /* ffreep sti, undocumented op */
3834
                gen_op_ffree_STN(opreg);
3835
                gen_op_fpop();
3836
                break;
3837
            case 0x3c: /* df/4 */
3838
                switch(rm) {
3839
                case 0:
3840
                    gen_op_fnstsw_EAX();
3841
                    break;
3842
                default:
3843
                    goto illegal_op;
3844
                }
3845
                break;
3846
            case 0x3d: /* fucomip */
3847
                if (s->cc_op != CC_OP_DYNAMIC)
3848
                    gen_op_set_cc_op(s->cc_op);
3849
                gen_op_fmov_FT0_STN(opreg);
3850
                gen_op_fucomi_ST0_FT0();
3851
                gen_op_fpop();
3852
                s->cc_op = CC_OP_EFLAGS;
3853
                break;
3854
            case 0x3e: /* fcomip */
3855
                if (s->cc_op != CC_OP_DYNAMIC)
3856
                    gen_op_set_cc_op(s->cc_op);
3857
                gen_op_fmov_FT0_STN(opreg);
3858
                gen_op_fcomi_ST0_FT0();
3859
                gen_op_fpop();
3860
                s->cc_op = CC_OP_EFLAGS;
3861
                break;
3862
            case 0x10 ... 0x13: /* fcmovxx */
3863
            case 0x18 ... 0x1b:
3864
                {
3865
                    int op1;
3866
                    static const uint8_t fcmov_cc[8] = {
3867
                        (JCC_B << 1),
3868
                        (JCC_Z << 1),
3869
                        (JCC_BE << 1),
3870
                        (JCC_P << 1),
3871
                    };
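                    /* the low two bits of the opcode select the base
                       condition (below, equal, below-or-equal, unordered);
                       ops 0x18-0x1b are the negated fcmovnxx forms, encoded
                       in bit 0 of op1 which gen_setcc interprets as
                       "invert" */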
3872
                    op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
3873
                    gen_setcc(s, op1);
3874
                    gen_op_fcmov_ST0_STN_T0(opreg);
3875
                }
3876
                break;
3877
            default:
3878
                goto illegal_op;
3879
            }
3880
        }
3881
#ifdef USE_CODE_COPY
3882
        s->tb->cflags |= CF_TB_FP_USED;
3883
#endif
3884
        break;
3885
        /************************/
3886
        /* string ops */
3887

    
3888
    case 0xa4: /* movsS */
3889
    case 0xa5:
3890
        if ((b & 1) == 0)
3891
            ot = OT_BYTE;
3892
        else
3893
            ot = dflag + OT_WORD;
3894

    
3895
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
3896
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
3897
        } else {
3898
            gen_movs(s, ot);
3899
        }
3900
        break;
3901
        
3902
    case 0xaa: /* stosS */
3903
    case 0xab:
3904
        if ((b & 1) == 0)
3905
            ot = OT_BYTE;
3906
        else
3907
            ot = dflag + OT_WORD;
3908

    
3909
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
3910
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
3911
        } else {
3912
            gen_stos(s, ot);
3913
        }
3914
        break;
3915
    case 0xac: /* lodsS */
3916
    case 0xad:
3917
        if ((b & 1) == 0)
3918
            ot = OT_BYTE;
3919
        else
3920
            ot = dflag + OT_WORD;
3921
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
3922
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
3923
        } else {
3924
            gen_lods(s, ot);
3925
        }
3926
        break;
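        /* for scas and cmps the two repeat prefixes differ: REPNZ repeats
           while ZF is clear, REPZ while ZF is set; the last argument of
           gen_repz_scas/gen_repz_cmps selects between the two */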
3927
    case 0xae: /* scasS */
3928
    case 0xaf:
3929
        if ((b & 1) == 0)
3930
            ot = OT_BYTE;
3931
        else
3932
            ot = dflag + OT_WORD;
3933
        if (prefixes & PREFIX_REPNZ) {
3934
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
3935
        } else if (prefixes & PREFIX_REPZ) {
3936
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
3937
        } else {
3938
            gen_scas(s, ot);
3939
            s->cc_op = CC_OP_SUBB + ot;
3940
        }
3941
        break;
3942

    
3943
    case 0xa6: /* cmpsS */
3944
    case 0xa7:
3945
        if ((b & 1) == 0)
3946
            ot = OT_BYTE;
3947
        else
3948
            ot = dflag + OT_WORD;
3949
        if (prefixes & PREFIX_REPNZ) {
3950
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
3951
        } else if (prefixes & PREFIX_REPZ) {
3952
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
3953
        } else {
3954
            gen_cmps(s, ot);
3955
            s->cc_op = CC_OP_SUBB + ot;
3956
        }
3957
        break;
3958
    case 0x6c: /* insS */
3959
    case 0x6d:
3960
        if ((b & 1) == 0)
3961
            ot = OT_BYTE;
3962
        else
3963
            ot = dflag ? OT_LONG : OT_WORD;
3964
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
3965
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
3966
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
3967
        } else {
3968
            gen_ins(s, ot);
3969
        }
3970
        break;
3971
    case 0x6e: /* outsS */
3972
    case 0x6f:
3973
        if ((b & 1) == 0)
3974
            ot = OT_BYTE;
3975
        else
3976
            ot = dflag ? OT_LONG : OT_WORD;
3977
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
3978
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
3979
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
3980
        } else {
3981
            gen_outs(s, ot);
3982
        }
3983
        break;
3984

    
3985
        /************************/
3986
        /* port I/O */
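        /* gen_check_io emits the I/O permission check (CPL vs IOPL and, when
           needed, the TSS I/O permission bitmap) before the port access */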
3987
    case 0xe4: /* in imm8 */
3988
    case 0xe5:
3989
        if ((b & 1) == 0)
3990
            ot = OT_BYTE;
3991
        else
3992
            ot = dflag ? OT_LONG : OT_WORD;
3993
        val = ldub_code(s->pc++);
3994
        gen_op_movl_T0_im(val);
3995
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
3996
        gen_op_in[ot]();
3997
        gen_op_mov_reg_T1[ot][R_EAX]();
3998
        break;
3999
    case 0xe6: /* out imm8 */
4000
    case 0xe7:
4001
        if ((b & 1) == 0)
4002
            ot = OT_BYTE;
4003
        else
4004
            ot = dflag ? OT_LONG : OT_WORD;
4005
        val = ldub_code(s->pc++);
4006
        gen_op_movl_T0_im(val);
4007
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
4008
        gen_op_mov_TN_reg[ot][1][R_EAX]();
4009
        gen_op_out[ot]();
4010
        break;
4011
    case 0xec: /* in dx */
4012
    case 0xed:
4013
        if ((b & 1) == 0)
4014
            ot = OT_BYTE;
4015
        else
4016
            ot = dflag ? OT_LONG : OT_WORD;
4017
        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4018
        gen_op_andl_T0_ffff();
4019
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
4020
        gen_op_in[ot]();
4021
        gen_op_mov_reg_T1[ot][R_EAX]();
4022
        break;
4023
    case 0xee: /* out dx */
4024
    case 0xef:
4025
        if ((b & 1) == 0)
4026
            ot = OT_BYTE;
4027
        else
4028
            ot = dflag ? OT_LONG : OT_WORD;
4029
        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4030
        gen_op_andl_T0_ffff();
4031
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
4032
        gen_op_mov_TN_reg[ot][1][R_EAX]();
4033
        gen_op_out[ot]();
4034
        break;
4035

    
4036
        /************************/
4037
        /* control */
4038
    case 0xc2: /* ret im */
4039
        val = ldsw_code(s->pc);
4040
        s->pc += 2;
4041
        gen_pop_T0(s);
4042
        gen_stack_update(s, val + (2 << s->dflag));
4043
        if (s->dflag == 0)
4044
            gen_op_andl_T0_ffff();
4045
        gen_op_jmp_T0();
4046
        gen_eob(s);
4047
        break;
4048
    case 0xc3: /* ret */
4049
        gen_pop_T0(s);
4050
        gen_pop_update(s);
4051
        if (s->dflag == 0)
4052
            gen_op_andl_T0_ffff();
4053
        gen_op_jmp_T0();
4054
        gen_eob(s);
4055
        break;
4056
    case 0xca: /* lret im */
4057
        val = ldsw_code(s->pc);
4058
        s->pc += 2;
4059
    do_lret:
4060
        if (s->pe && !s->vm86) {
4061
            if (s->cc_op != CC_OP_DYNAMIC)
4062
                gen_op_set_cc_op(s->cc_op);
4063
            gen_jmp_im(pc_start - s->cs_base);
4064
            gen_op_lret_protected(s->dflag, val);
4065
        } else {
4066
            gen_stack_A0(s);
4067
            /* pop offset */
4068
            gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
4069
            if (s->dflag == 0)
4070
                gen_op_andl_T0_ffff();
4071
            /* NOTE: keeping EIP updated is not a problem in case of
4072
               exception */
4073
            gen_op_jmp_T0();
4074
            /* pop selector */
4075
            gen_op_addl_A0_im(2 << s->dflag);
4076
            gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
4077
            gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
4078
            /* add stack offset */
4079
            gen_stack_update(s, val + (4 << s->dflag));
4080
        }
4081
        gen_eob(s);
4082
        break;
4083
    case 0xcb: /* lret */
4084
        val = 0;
4085
        goto do_lret;
4086
    case 0xcf: /* iret */
4087
        if (!s->pe) {
4088
            /* real mode */
4089
            gen_op_iret_real(s->dflag);
4090
            s->cc_op = CC_OP_EFLAGS;
4091
        } else if (s->vm86) {
4092
            if (s->iopl != 3) {
4093
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4094
            } else {
4095
                gen_op_iret_real(s->dflag);
4096
                s->cc_op = CC_OP_EFLAGS;
4097
            }
4098
        } else {
4099
            if (s->cc_op != CC_OP_DYNAMIC)
4100
                gen_op_set_cc_op(s->cc_op);
4101
            gen_jmp_im(pc_start - s->cs_base);
4102
            gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
4103
            s->cc_op = CC_OP_EFLAGS;
4104
        }
4105
        gen_eob(s);
4106
        break;
4107
    case 0xe8: /* call im */
4108
        {
4109
            if (dflag)
4110
                tval = (int32_t)insn_get(s, OT_LONG);
4111
            else
4112
                tval = (int16_t)insn_get(s, OT_WORD);
4113
            next_eip = s->pc - s->cs_base;
4114
            tval += next_eip;
4115
            if (s->dflag == 0)
4116
                tval &= 0xffff;
4117
            gen_movtl_T0_im(next_eip);
4118
            gen_push_T0(s);
4119
            gen_jmp(s, tval);
4120
        }
4121
        break;
4122
    case 0x9a: /* lcall im */
4123
        {
4124
            unsigned int selector, offset;
4125
            
4126
            if (CODE64(s))
4127
                goto illegal_op;
4128
            ot = dflag ? OT_LONG : OT_WORD;
4129
            offset = insn_get(s, ot);
4130
            selector = insn_get(s, OT_WORD);
4131
            
4132
            gen_op_movl_T0_im(selector);
4133
            gen_op_movl_T1_imu(offset);
4134
        }
4135
        goto do_lcall;
4136
    case 0xe9: /* jmp */
4137
        if (dflag)
4138
            tval = (int32_t)insn_get(s, OT_LONG);
4139
        else
4140
            tval = (int16_t)insn_get(s, OT_WORD);
4141
        tval += s->pc - s->cs_base;
4142
        if (s->dflag == 0)
4143
            tval &= 0xffff;
4144
        gen_jmp(s, tval);
4145
        break;
4146
    case 0xea: /* ljmp im */
4147
        {
4148
            unsigned int selector, offset;
4149

    
4150
            if (CODE64(s))
4151
                goto illegal_op;
4152
            ot = dflag ? OT_LONG : OT_WORD;
4153
            offset = insn_get(s, ot);
4154
            selector = insn_get(s, OT_WORD);
4155
            
4156
            gen_op_movl_T0_im(selector);
4157
            gen_op_movl_T1_imu(offset);
4158
        }
4159
        goto do_ljmp;
4160
    case 0xeb: /* jmp Jb */
4161
        tval = (int8_t)insn_get(s, OT_BYTE);
4162
        tval += s->pc - s->cs_base;
4163
        if (s->dflag == 0)
4164
            tval &= 0xffff;
4165
        gen_jmp(s, tval);
4166
        break;
4167
    case 0x70 ... 0x7f: /* jcc Jb */
4168
        tval = (int8_t)insn_get(s, OT_BYTE);
4169
        goto do_jcc;
4170
    case 0x180 ... 0x18f: /* jcc Jv */
4171
        if (dflag) {
4172
            tval = (int32_t)insn_get(s, OT_LONG);
4173
        } else {
4174
            tval = (int16_t)insn_get(s, OT_WORD); 
4175
        }
4176
    do_jcc:
4177
        next_eip = s->pc - s->cs_base;
4178
        tval += next_eip;
4179
        if (s->dflag == 0)
4180
            tval &= 0xffff;
4181
        gen_jcc(s, b, tval, next_eip);
4182
        break;
4183

    
4184
    case 0x190 ... 0x19f: /* setcc Eb */
4185
        modrm = ldub_code(s->pc++);
4186
        gen_setcc(s, b);
4187
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
4188
        break;
4189
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
4190
        ot = dflag + OT_WORD;
4191
        modrm = ldub_code(s->pc++);
4192
        reg = ((modrm >> 3) & 7) | rex_r;
4193
        mod = (modrm >> 6) & 3;
4194
        gen_setcc(s, b);
4195
        if (mod != 3) {
4196
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4197
            gen_op_ld_T1_A0[ot + s->mem_index]();
4198
        } else {
4199
            rm = (modrm & 7) | REX_B(s);
4200
            gen_op_mov_TN_reg[ot][1][rm]();
4201
        }
4202
        gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
4203
        break;
4204
        
4205
        /************************/
4206
        /* flags */
4207
    case 0x9c: /* pushf */
4208
        if (s->vm86 && s->iopl != 3) {
4209
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4210
        } else {
4211
            if (s->cc_op != CC_OP_DYNAMIC)
4212
                gen_op_set_cc_op(s->cc_op);
4213
            gen_op_movl_T0_eflags();
4214
            gen_push_T0(s);
4215
        }
4216
        break;
4217
    case 0x9d: /* popf */
4218
        if (s->vm86 && s->iopl != 3) {
4219
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4220
        } else {
4221
            gen_pop_T0(s);
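            /* the three variants differ in which flags may be modified:
               at CPL 0 all of them (including IOPL and IF), at CPL <= IOPL
               IF but not IOPL, otherwise neither IF nor IOPL */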
4222
            if (s->cpl == 0) {
4223
                if (s->dflag) {
4224
                    gen_op_movl_eflags_T0_cpl0();
4225
                } else {
4226
                    gen_op_movw_eflags_T0_cpl0();
4227
                }
4228
            } else {
4229
                if (s->cpl <= s->iopl) {
4230
                    if (s->dflag) {
4231
                        gen_op_movl_eflags_T0_io();
4232
                    } else {
4233
                        gen_op_movw_eflags_T0_io();
4234
                    }
4235
                } else {
4236
                    if (s->dflag) {
4237
                        gen_op_movl_eflags_T0();
4238
                    } else {
4239
                        gen_op_movw_eflags_T0();
4240
                    }
4241
                }
4242
            }
4243
            gen_pop_update(s);
4244
            s->cc_op = CC_OP_EFLAGS;
4245
            /* abort translation because TF flag may change */
4246
            gen_jmp_im(s->pc - s->cs_base);
4247
            gen_eob(s);
4248
        }
4249
        break;
4250
    case 0x9e: /* sahf */
4251
        if (CODE64(s))
4252
            goto illegal_op;
4253
        gen_op_mov_TN_reg[OT_BYTE][0][R_AH]();
4254
        if (s->cc_op != CC_OP_DYNAMIC)
4255
            gen_op_set_cc_op(s->cc_op);
4256
        gen_op_movb_eflags_T0();
4257
        s->cc_op = CC_OP_EFLAGS;
4258
        break;
4259
    case 0x9f: /* lahf */
4260
        if (CODE64(s))
4261
            goto illegal_op;
4262
        if (s->cc_op != CC_OP_DYNAMIC)
4263
            gen_op_set_cc_op(s->cc_op);
4264
        gen_op_movl_T0_eflags();
4265
        gen_op_mov_reg_T0[OT_BYTE][R_AH]();
4266
        break;
4267
    case 0xf5: /* cmc */
4268
        if (s->cc_op != CC_OP_DYNAMIC)
4269
            gen_op_set_cc_op(s->cc_op);
4270
        gen_op_cmc();
4271
        s->cc_op = CC_OP_EFLAGS;
4272
        break;
4273
    case 0xf8: /* clc */
4274
        if (s->cc_op != CC_OP_DYNAMIC)
4275
            gen_op_set_cc_op(s->cc_op);
4276
        gen_op_clc();
4277
        s->cc_op = CC_OP_EFLAGS;
4278
        break;
4279
    case 0xf9: /* stc */
4280
        if (s->cc_op != CC_OP_DYNAMIC)
4281
            gen_op_set_cc_op(s->cc_op);
4282
        gen_op_stc();
4283
        s->cc_op = CC_OP_EFLAGS;
4284
        break;
4285
    case 0xfc: /* cld */
4286
        gen_op_cld();
4287
        break;
4288
    case 0xfd: /* std */
4289
        gen_op_std();
4290
        break;
4291

    
4292
        /************************/
4293
        /* bit operations */
4294
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
4295
        ot = dflag + OT_WORD;
4296
        modrm = ldub_code(s->pc++);
4297
        op = (modrm >> 3) & 7; /* opcode extension, REX.R does not apply */
4298
        mod = (modrm >> 6) & 3;
4299
        rm = (modrm & 7) | REX_B(s);
4300
        if (mod != 3) {
4301
            s->rip_offset = 1;
4302
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4303
            gen_op_ld_T0_A0[ot + s->mem_index]();
4304
        } else {
4305
            gen_op_mov_TN_reg[ot][0][rm]();
4306
        }
4307
        /* load shift */
4308
        val = ldub_code(s->pc++);
4309
        gen_op_movl_T1_im(val);
4310
        if (op < 4)
4311
            goto illegal_op;
4312
        op -= 4;
4313
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
4314
        s->cc_op = CC_OP_SARB + ot;
4315
        if (op != 0) {
4316
            if (mod != 3)
4317
                gen_op_st_T0_A0[ot + s->mem_index]();
4318
            else
4319
                gen_op_mov_reg_T0[ot][rm]();
4320
            gen_op_update_bt_cc();
4321
        }
4322
        break;
4323
    case 0x1a3: /* bt Gv, Ev */
4324
        op = 0;
4325
        goto do_btx;
4326
    case 0x1ab: /* bts */
4327
        op = 1;
4328
        goto do_btx;
4329
    case 0x1b3: /* btr */
4330
        op = 2;
4331
        goto do_btx;
4332
    case 0x1bb: /* btc */
4333
        op = 3;
4334
    do_btx:
4335
        ot = dflag + OT_WORD;
4336
        modrm = ldub_code(s->pc++);
4337
        reg = ((modrm >> 3) & 7) | rex_r;
4338
        mod = (modrm >> 6) & 3;
4339
        rm = (modrm & 7) | REX_B(s);
4340
        gen_op_mov_TN_reg[OT_LONG][1][reg]();
4341
        if (mod != 3) {
4342
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4343
            /* specific case: we need to add a displacement */
4344
            gen_op_add_bit_A0_T1[ot - OT_WORD]();
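            /* the bit offset in T1 may address beyond the operand, so A0 is
               advanced to the word that contains the selected bit */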
4345
            gen_op_ld_T0_A0[ot + s->mem_index]();
4346
        } else {
4347
            gen_op_mov_TN_reg[ot][0][rm]();
4348
        }
4349
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
4350
        s->cc_op = CC_OP_SARB + ot;
4351
        if (op != 0) {
4352
            if (mod != 3)
4353
                gen_op_st_T0_A0[ot + s->mem_index]();
4354
            else
4355
                gen_op_mov_reg_T0[ot][rm]();
4356
            gen_op_update_bt_cc();
4357
        }
4358
        break;
4359
    case 0x1bc: /* bsf */
4360
    case 0x1bd: /* bsr */
4361
        ot = dflag + OT_WORD;
4362
        modrm = ldub_code(s->pc++);
4363
        reg = ((modrm >> 3) & 7) | rex_r;
4364
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4365
        /* NOTE: in order to handle the 0 case, we must load the
4366
           result. It could be optimized with a generated jump */
4367
        gen_op_mov_TN_reg[ot][1][reg]();
4368
        gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
4369
        gen_op_mov_reg_T1[ot][reg]();
4370
        s->cc_op = CC_OP_LOGICB + ot;
4371
        break;
4372
        /************************/
4373
        /* bcd */
4374
    case 0x27: /* daa */
4375
        if (CODE64(s))
4376
            goto illegal_op;
4377
        if (s->cc_op != CC_OP_DYNAMIC)
4378
            gen_op_set_cc_op(s->cc_op);
4379
        gen_op_daa();
4380
        s->cc_op = CC_OP_EFLAGS;
4381
        break;
4382
    case 0x2f: /* das */
4383
        if (CODE64(s))
4384
            goto illegal_op;
4385
        if (s->cc_op != CC_OP_DYNAMIC)
4386
            gen_op_set_cc_op(s->cc_op);
4387
        gen_op_das();
4388
        s->cc_op = CC_OP_EFLAGS;
4389
        break;
4390
    case 0x37: /* aaa */
4391
        if (CODE64(s))
4392
            goto illegal_op;
4393
        if (s->cc_op != CC_OP_DYNAMIC)
4394
            gen_op_set_cc_op(s->cc_op);
4395
        gen_op_aaa();
4396
        s->cc_op = CC_OP_EFLAGS;
4397
        break;
4398
    case 0x3f: /* aas */
4399
        if (CODE64(s))
4400
            goto illegal_op;
4401
        if (s->cc_op != CC_OP_DYNAMIC)
4402
            gen_op_set_cc_op(s->cc_op);
4403
        gen_op_aas();
4404
        s->cc_op = CC_OP_EFLAGS;
4405
        break;
4406
    case 0xd4: /* aam */
4407
        if (CODE64(s))
4408
            goto illegal_op;
4409
        val = ldub_code(s->pc++);
4410
        gen_op_aam(val);
4411
        s->cc_op = CC_OP_LOGICB;
4412
        break;
4413
    case 0xd5: /* aad */
4414
        if (CODE64(s))
4415
            goto illegal_op;
4416
        val = ldub_code(s->pc++);
4417
        gen_op_aad(val);
4418
        s->cc_op = CC_OP_LOGICB;
4419
        break;
4420
        /************************/
4421
        /* misc */
4422
    case 0x90: /* nop */
4423
        /* XXX: xchg + rex handling */
4424
        /* XXX: correct lock test for all insn */
4425
        if (prefixes & PREFIX_LOCK)
4426
            goto illegal_op;
4427
        break;
4428
    case 0x9b: /* fwait */
4429
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) == 
4430
            (HF_MP_MASK | HF_TS_MASK)) {
4431
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4432
        } else {
4433
            if (s->cc_op != CC_OP_DYNAMIC)
4434
                gen_op_set_cc_op(s->cc_op);
4435
            gen_jmp_im(pc_start - s->cs_base);
4436
            gen_op_fwait();
4437
        }
4438
        break;
4439
    case 0xcc: /* int3 */
4440
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
4441
        break;
4442
    case 0xcd: /* int N */
4443
        val = ldub_code(s->pc++);
4444
        if (s->vm86 && s->iopl != 3) {
4445
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base); 
4446
        } else {
4447
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
4448
        }
4449
        break;
4450
    case 0xce: /* into */
4451
        if (CODE64(s))
4452
            goto illegal_op;
4453
        if (s->cc_op != CC_OP_DYNAMIC)
4454
            gen_op_set_cc_op(s->cc_op);
4455
        gen_jmp_im(pc_start - s->cs_base);
4456
        gen_op_into(s->pc - pc_start);
4457
        break;
4458
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
4459
#if 0
4460
        gen_debug(s, pc_start - s->cs_base);
4461
#else
4462
        /* test ! */
4463
        cpu_set_log(CPU_LOG_TB_IN_ASM | CPU_LOG_PCALL);
4464
#endif
4465
        break;
4466
    case 0xfa: /* cli */
4467
        if (!s->vm86) {
4468
            if (s->cpl <= s->iopl) {
4469
                gen_op_cli();
4470
            } else {
4471
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4472
            }
4473
        } else {
4474
            if (s->iopl == 3) {
4475
                gen_op_cli();
4476
            } else {
4477
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4478
            }
4479
        }
4480
        break;
4481
    case 0xfb: /* sti */
4482
        if (!s->vm86) {
4483
            if (s->cpl <= s->iopl) {
4484
            gen_sti:
4485
                gen_op_sti();
4486
                /* interrupts are enabled only after the instruction following sti */
4487
                /* if several consecutive instructions inhibit interrupts, only the
4488
                   _first_ one sets the inhibit flag */
4489
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4490
                    gen_op_set_inhibit_irq();
4491
                /* give a chance to handle pending irqs */
4492
                gen_jmp_im(s->pc - s->cs_base);
4493
                gen_eob(s);
4494
            } else {
4495
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4496
            }
4497
        } else {
4498
            if (s->iopl == 3) {
4499
                goto gen_sti;
4500
            } else {
4501
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4502
            }
4503
        }
4504
        break;
4505
    case 0x62: /* bound */
4506
        if (CODE64(s))
4507
            goto illegal_op;
4508
        ot = dflag ? OT_LONG : OT_WORD;
4509
        modrm = ldub_code(s->pc++);
4510
        reg = (modrm >> 3) & 7;
4511
        mod = (modrm >> 6) & 3;
4512
        if (mod == 3)
4513
            goto illegal_op;
4514
        gen_op_mov_TN_reg[ot][0][reg]();
4515
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4516
        gen_jmp_im(pc_start - s->cs_base);
4517
        if (ot == OT_WORD)
4518
            gen_op_boundw();
4519
        else
4520
            gen_op_boundl();
4521
        break;
4522
    case 0x1c8 ... 0x1cf: /* bswap reg */
4523
        reg = (b & 7) | REX_B(s);
4524
#ifdef TARGET_X86_64
4525
        if (dflag == 2) {
4526
            gen_op_mov_TN_reg[OT_QUAD][0][reg]();
4527
            gen_op_bswapq_T0();
4528
            gen_op_mov_reg_T0[OT_QUAD][reg]();
4529
        } else 
4530
#endif
4531
        {
4532
            gen_op_mov_TN_reg[OT_LONG][0][reg]();
4533
            gen_op_bswapl_T0();
4534
            gen_op_mov_reg_T0[OT_LONG][reg]();
4535
        }
4536
        break;
4537
    case 0xd6: /* salc */
4538
        if (CODE64(s))
4539
            goto illegal_op;
4540
        if (s->cc_op != CC_OP_DYNAMIC)
4541
            gen_op_set_cc_op(s->cc_op);
4542
        gen_op_salc();
4543
        break;
4544
    case 0xe0: /* loopnz */
4545
    case 0xe1: /* loopz */
4546
        if (s->cc_op != CC_OP_DYNAMIC)
4547
            gen_op_set_cc_op(s->cc_op);
4548
        /* FALL THRU */
4549
    case 0xe2: /* loop */
4550
    case 0xe3: /* jecxz */
4551
        {
4552
            int l1, l2;
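            /* l1: branch taken (jump to tval), l2: fall through to the next
               instruction; both paths end the translation block */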
4553

    
4554
            tval = (int8_t)insn_get(s, OT_BYTE);
4555
            next_eip = s->pc - s->cs_base;
4556
            tval += next_eip;
4557
            if (s->dflag == 0)
4558
                tval &= 0xffff;
4559
            
4560
            l1 = gen_new_label();
4561
            l2 = gen_new_label();
4562
            b &= 3;
4563
            if (b == 3) {
4564
                gen_op_jz_ecx[s->aflag](l1);
4565
            } else {
4566
                gen_op_dec_ECX[s->aflag]();
4567
                gen_op_loop[s->aflag][b](l1);
4568
            }
4569

    
4570
            gen_jmp_im(next_eip);
4571
            gen_op_jmp_label(l2);
4572
            gen_set_label(l1);
4573
            gen_jmp_im(tval);
4574
            gen_set_label(l2);
4575
            gen_eob(s);
4576
        }
4577
        break;
4578
    case 0x130: /* wrmsr */
4579
    case 0x132: /* rdmsr */
4580
        if (s->cpl != 0) {
4581
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4582
        } else {
4583
            if (b & 2)
4584
                gen_op_rdmsr();
4585
            else
4586
                gen_op_wrmsr();
4587
        }
4588
        break;
4589
    case 0x131: /* rdtsc */
4590
        gen_op_rdtsc();
4591
        break;
4592
    case 0x134: /* sysenter */
4593
        if (CODE64(s))
4594
            goto illegal_op;
4595
        if (!s->pe) {
4596
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4597
        } else {
4598
            if (s->cc_op != CC_OP_DYNAMIC) {
4599
                gen_op_set_cc_op(s->cc_op);
4600
                s->cc_op = CC_OP_DYNAMIC;
4601
            }
4602
            gen_jmp_im(pc_start - s->cs_base);
4603
            gen_op_sysenter();
4604
            gen_eob(s);
4605
        }
4606
        break;
4607
    case 0x135: /* sysexit */
4608
        if (CODE64(s))
4609
            goto illegal_op;
4610
        if (!s->pe) {
4611
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4612
        } else {
4613
            if (s->cc_op != CC_OP_DYNAMIC) {
4614
                gen_op_set_cc_op(s->cc_op);
4615
                s->cc_op = CC_OP_DYNAMIC;
4616
            }
4617
            gen_jmp_im(pc_start - s->cs_base);
4618
            gen_op_sysexit();
4619
            gen_eob(s);
4620
        }
4621
        break;
4622
#ifdef TARGET_X86_64
4623
    case 0x105: /* syscall */
4624
        /* XXX: is it usable in real mode? */
4625
        if (s->cc_op != CC_OP_DYNAMIC) {
4626
            gen_op_set_cc_op(s->cc_op);
4627
            s->cc_op = CC_OP_DYNAMIC;
4628
        }
4629
        gen_jmp_im(pc_start - s->cs_base);
4630
        gen_op_syscall(s->pc - pc_start);
4631
        gen_eob(s);
4632
        break;
4633
    case 0x107: /* sysret */
4634
        if (!s->pe) {
4635
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4636
        } else {
4637
            if (s->cc_op != CC_OP_DYNAMIC) {
4638
                gen_op_set_cc_op(s->cc_op);
4639
                s->cc_op = CC_OP_DYNAMIC;
4640
            }
4641
            gen_jmp_im(pc_start - s->cs_base);
4642
            gen_op_sysret(s->dflag);
4643
            gen_eob(s);
4644
        }
4645
        break;
4646
#endif
4647
    case 0x1a2: /* cpuid */
4648
        gen_op_cpuid();
4649
        break;
4650
    case 0xf4: /* hlt */
4651
        if (s->cpl != 0) {
4652
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4653
        } else {
4654
            if (s->cc_op != CC_OP_DYNAMIC)
4655
                gen_op_set_cc_op(s->cc_op);
4656
            gen_jmp_im(s->pc - s->cs_base);
4657
            gen_op_hlt();
4658
            s->is_jmp = 3;
4659
        }
4660
        break;
4661
    case 0x100:
4662
        modrm = ldub_code(s->pc++);
4663
        mod = (modrm >> 6) & 3;
4664
        op = (modrm >> 3) & 7;
4665
        switch(op) {
4666
        case 0: /* sldt */
4667
            if (!s->pe || s->vm86)
4668
                goto illegal_op;
4669
            gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
4670
            ot = OT_WORD;
4671
            if (mod == 3)
4672
                ot += s->dflag;
4673
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4674
            break;
4675
        case 2: /* lldt */
4676
            if (!s->pe || s->vm86)
4677
                goto illegal_op;
4678
            if (s->cpl != 0) {
4679
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4680
            } else {
4681
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4682
                gen_jmp_im(pc_start - s->cs_base);
4683
                gen_op_lldt_T0();
4684
            }
4685
            break;
4686
        case 1: /* str */
4687
            if (!s->pe || s->vm86)
4688
                goto illegal_op;
4689
            gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
4690
            ot = OT_WORD;
4691
            if (mod == 3)
4692
                ot += s->dflag;
4693
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4694
            break;
4695
        case 3: /* ltr */
4696
            if (!s->pe || s->vm86)
4697
                goto illegal_op;
4698
            if (s->cpl != 0) {
4699
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4700
            } else {
4701
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4702
                gen_jmp_im(pc_start - s->cs_base);
4703
                gen_op_ltr_T0();
4704
            }
4705
            break;
4706
        case 4: /* verr */
4707
        case 5: /* verw */
4708
            if (!s->pe || s->vm86)
4709
                goto illegal_op;
4710
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4711
            if (s->cc_op != CC_OP_DYNAMIC)
4712
                gen_op_set_cc_op(s->cc_op);
4713
            if (op == 4)
4714
                gen_op_verr();
4715
            else
4716
                gen_op_verw();
4717
            s->cc_op = CC_OP_EFLAGS;
4718
            break;
4719
        default:
4720
            goto illegal_op;
4721
        }
4722
        break;
4723
    case 0x101:
4724
        modrm = ldub_code(s->pc++);
4725
        mod = (modrm >> 6) & 3;
4726
        op = (modrm >> 3) & 7;
4727
        switch(op) {
4728
        case 0: /* sgdt */
4729
        case 1: /* sidt */
4730
            if (mod == 3)
4731
                goto illegal_op;
4732
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4733
            if (op == 0)
4734
                gen_op_movl_T0_env(offsetof(CPUX86State,gdt.limit));
4735
            else
4736
                gen_op_movl_T0_env(offsetof(CPUX86State,idt.limit));
4737
            gen_op_st_T0_A0[OT_WORD + s->mem_index]();
4738
#ifdef TARGET_X86_64
4739
            if (CODE64(s)) 
4740
                gen_op_addq_A0_im(2);
4741
            else
4742
#endif
4743
                gen_op_addl_A0_im(2);
4744
            if (op == 0)
4745
                gen_op_movtl_T0_env(offsetof(CPUX86State,gdt.base));
4746
            else
4747
                gen_op_movtl_T0_env(offsetof(CPUX86State,idt.base));
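            /* with a 16-bit operand size only the low 24 bits of the base
               are stored */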
4748
            if (!s->dflag)
4749
                gen_op_andl_T0_im(0xffffff);
4750
            gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
4751
            break;
4752
        case 2: /* lgdt */
4753
        case 3: /* lidt */
4754
            if (mod == 3)
4755
                goto illegal_op;
4756
            if (s->cpl != 0) {
4757
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4758
            } else {
4759
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4760
                gen_op_ld_T1_A0[OT_WORD + s->mem_index]();
4761
#ifdef TARGET_X86_64
4762
                if (CODE64(s))
4763
                    gen_op_addq_A0_im(2);
4764
                else
4765
#endif
4766
                    gen_op_addl_A0_im(2);
4767
                gen_op_ld_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
4768
                if (!s->dflag)
4769
                    gen_op_andl_T0_im(0xffffff);
4770
                if (op == 2) {
4771
                    gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
4772
                    gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
4773
                } else {
4774
                    gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
4775
                    gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
4776
                }
4777
            }
4778
            break;
4779
        case 4: /* smsw */
4780
            gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
4781
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
4782
            break;
4783
        case 6: /* lmsw */
4784
            if (s->cpl != 0) {
4785
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4786
            } else {
4787
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4788
                gen_op_lmsw_T0();
4789
                gen_jmp_im(s->pc - s->cs_base);
4790
                gen_eob(s);
4791
            }
4792
            break;
4793
        case 7: /* invlpg */
4794
            if (s->cpl != 0) {
4795
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4796
            } else {
4797
                if (mod == 3) {
4798
#ifdef TARGET_X86_64
4799
                    if (CODE64(s) && (modrm & 7) == 0) {
4800
                        /* swapgs */
4801
                        gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
4802
                        gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
4803
                        gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
4804
                        gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
4805
                    } else 
4806
#endif
4807
                    {
4808
                        goto illegal_op;
4809
                    }
4810
                } else {
4811
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4812
                    gen_op_invlpg_A0();
4813
                    gen_jmp_im(s->pc - s->cs_base);
4814
                    gen_eob(s);
4815
                }
4816
            }
4817
            break;
4818
        default:
4819
            goto illegal_op;
4820
        }
4821
        break;
4822
    case 0x108: /* invd */
4823
    case 0x109: /* wbinvd */
4824
        if (s->cpl != 0) {
4825
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4826
        } else {
4827
            /* nothing to do */
4828
        }
4829
        break;
4830
    case 0x1ae:
4831
        modrm = ldub_code(s->pc++);
4832
        mod = (modrm >> 6) & 3;
4833
        op = (modrm >> 3) & 7;
4834
        switch(op) {
4835
        case 0: /* fxsave */
4836
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR))
4837
                goto illegal_op;
4838
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4839
            gen_op_fxsave_A0((s->dflag == 2));
4840
            break;
4841
        case 1: /* fxrstor */
4842
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR))
4843
                goto illegal_op;
4844
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4845
            gen_op_fxrstor_A0((s->dflag == 2));
4846
            break;
4847
        case 5: /* lfence */
4848
        case 6: /* mfence */
4849
        case 7: /* sfence */
4850
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
4851
                goto illegal_op;
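            /* no code is generated for the fence itself */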
4852
            break;
4853
        default:
4854
            goto illegal_op;
4855
        }
4856
        break;
4857
    case 0x63: /* arpl or movslS (x86_64) */
4858
#ifdef TARGET_X86_64
4859
        if (CODE64(s)) {
4860
            int d_ot;
4861
            /* d_ot is the size of the destination */
4862
            d_ot = dflag + OT_WORD;
4863

    
4864
            modrm = ldub_code(s->pc++);
4865
            reg = ((modrm >> 3) & 7) | rex_r;
4866
            mod = (modrm >> 6) & 3;
4867
            rm = (modrm & 7) | REX_B(s);
4868
            
4869
            if (mod == 3) {
4870
                gen_op_mov_TN_reg[OT_LONG][0][rm]();
4871
                /* sign extend */
4872
                if (d_ot == OT_QUAD)
4873
                    gen_op_movslq_T0_T0();
4874
                gen_op_mov_reg_T0[d_ot][reg]();
4875
            } else {
4876
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4877
                if (d_ot == OT_QUAD) {
4878
                    gen_op_lds_T0_A0[OT_LONG + s->mem_index]();
4879
                } else {
4880
                    gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
4881
                }
4882
                gen_op_mov_reg_T0[d_ot][reg]();
4883
            }
4884
        } else 
4885
#endif
4886
        {
4887
            if (!s->pe || s->vm86)
4888
                goto illegal_op;
4889
            ot = dflag ? OT_LONG : OT_WORD;
4890
            modrm = ldub_code(s->pc++);
4891
            reg = (modrm >> 3) & 7;
4892
            mod = (modrm >> 6) & 3;
4893
            rm = modrm & 7;
4894
            if (mod != 3) {
4895
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4896
                gen_op_ld_T0_A0[ot + s->mem_index]();
4897
            } else {
4898
                gen_op_mov_TN_reg[ot][0][rm]();
4899
            }
4900
            if (s->cc_op != CC_OP_DYNAMIC)
4901
                gen_op_set_cc_op(s->cc_op);
4902
            gen_op_arpl();
4903
            s->cc_op = CC_OP_EFLAGS;
4904
            if (mod != 3) {
4905
                gen_op_st_T0_A0[ot + s->mem_index]();
4906
            } else {
4907
                gen_op_mov_reg_T0[ot][rm]();
4908
            }
4909
            gen_op_arpl_update();
4910
        }
4911
        break;
4912
    case 0x102: /* lar */
4913
    case 0x103: /* lsl */
4914
        if (!s->pe || s->vm86)
4915
            goto illegal_op;
4916
        ot = dflag ? OT_LONG : OT_WORD;
4917
        modrm = ldub_code(s->pc++);
4918
        reg = ((modrm >> 3) & 7) | rex_r;
4919
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4920
        gen_op_mov_TN_reg[ot][1][reg]();
4921
        if (s->cc_op != CC_OP_DYNAMIC)
4922
            gen_op_set_cc_op(s->cc_op);
4923
        if (b == 0x102)
4924
            gen_op_lar();
4925
        else
4926
            gen_op_lsl();
4927
        s->cc_op = CC_OP_EFLAGS;
4928
        gen_op_mov_reg_T1[ot][reg]();
4929
        break;
4930
    case 0x118:
4931
        modrm = ldub_code(s->pc++);
4932
        mod = (modrm >> 6) & 3;
4933
        op = (modrm >> 3) & 7;
4934
        switch(op) {
4935
        case 0: /* prefetchnta */
4936
        case 1: /* prefetcht0 */
4937
        case 2: /* prefetcht1 */
4938
        case 3: /* prefetcht2 */
4939
            if (mod == 3)
4940
                goto illegal_op;
4941
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4942
            /* nothing more to do */
4943
            break;
4944
        default:
4945
            goto illegal_op;
4946
        }
4947
        break;
4948
    case 0x120: /* mov reg, crN */
4949
    case 0x122: /* mov crN, reg */
4950
        if (s->cpl != 0) {
4951
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4952
        } else {
4953
            modrm = ldub_code(s->pc++);
4954
            if ((modrm & 0xc0) != 0xc0)
4955
                goto illegal_op;
4956
            rm = (modrm & 7) | REX_B(s);
4957
            reg = ((modrm >> 3) & 7) | rex_r;
4958
            if (CODE64(s))
4959
                ot = OT_QUAD;
4960
            else
4961
                ot = OT_LONG;
4962
            switch(reg) {
4963
            case 0:
4964
            case 2:
4965
            case 3:
4966
            case 4:
4967
                if (b & 2) {
4968
                    gen_op_mov_TN_reg[ot][0][rm]();
4969
                    gen_op_movl_crN_T0(reg);
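                    /* a control register write may change paging or other
                       static CPU state, so the translation block is ended */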
4970
                    gen_jmp_im(s->pc - s->cs_base);
4971
                    gen_eob(s);
4972
                } else {
4973
                    gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
4974
                    gen_op_mov_reg_T0[ot][rm]();
4975
                }
4976
                break;
4977
                /* XXX: add CR8 for x86_64 */
4978
            default:
4979
                goto illegal_op;
4980
            }
4981
        }
4982
        break;
4983
    case 0x121: /* mov reg, drN */
4984
    case 0x123: /* mov drN, reg */
4985
        if (s->cpl != 0) {
4986
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4987
        } else {
4988
            modrm = ldub_code(s->pc++);
4989
            if ((modrm & 0xc0) != 0xc0)
4990
                goto illegal_op;
4991
            rm = (modrm & 7) | REX_B(s);
4992
            reg = ((modrm >> 3) & 7) | rex_r;
4993
            if (CODE64(s))
4994
                ot = OT_QUAD;
4995
            else
4996
                ot = OT_LONG;
4997
            /* XXX: do it dynamically with CR4.DE bit */
4998
            if (reg == 4 || reg == 5 || reg >= 8)
4999
                goto illegal_op;
5000
            if (b & 2) {
5001
                gen_op_mov_TN_reg[ot][0][rm]();
5002
                gen_op_movl_drN_T0(reg);
5003
                gen_jmp_im(s->pc - s->cs_base);
5004
                gen_eob(s);
5005
            } else {
5006
                gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
5007
                gen_op_mov_reg_T0[ot][rm]();
5008
            }
5009
        }
5010
        break;
5011
    case 0x106: /* clts */
5012
        if (s->cpl != 0) {
5013
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5014
        } else {
5015
            gen_op_clts();
5016
            /* abort block because static cpu state changed */
5017
            gen_jmp_im(s->pc - s->cs_base);
5018
            gen_eob(s);
5019
        }
5020
        break;
5021
    /* SSE support */
    case 0x16f:
        if (prefixes & PREFIX_DATA) {
            /* movdqa xmm1, xmm2/mem128 */
            if (!(s->cpuid_features & CPUID_SSE))
                goto illegal_op;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
        } else {
            goto illegal_op;
        }
        break;
    case 0x1e7:
        if (prefixes & PREFIX_DATA) {
            /* movntdq mem128, xmm1 */
            if (!(s->cpuid_features & CPUID_SSE))
                goto illegal_op;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                goto illegal_op;
            }
        } else {
            goto illegal_op;
        }
        break;
    case 0x17f:
        if (prefixes & PREFIX_DATA) {
            /* movdqa xmm2/mem128, xmm1 */
            if (!(s->cpuid_features & CPUID_SSE))
                goto illegal_op;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                            offsetof(CPUX86State,xmm_regs[reg]));
            }
        } else {
            goto illegal_op;
        }
        break;
    default:
        goto illegal_op;
    }
    /* lock generation */
    if (s->prefix & PREFIX_LOCK)
        gen_op_unlock();
    return s->pc;
 illegal_op:
    if (s->prefix & PREFIX_LOCK)
        gen_op_unlock();
    /* XXX: ensure that no lock was generated */
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
    return s->pc;
}

#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)

/* flags read by an operation */
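/* indexed by micro-op; consulted by optimize_flags() below to decide
   whether an earlier flag-writing op is still needed */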
static uint16_t opc_read_flags[NB_OPS] = {
    [INDEX_op_aas] = CC_A,
    [INDEX_op_aaa] = CC_A,
    [INDEX_op_das] = CC_A | CC_C,
    [INDEX_op_daa] = CC_A | CC_C,

    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_C,

    [INDEX_op_into] = CC_O,

    [INDEX_op_jb_subb] = CC_C,
    [INDEX_op_jb_subw] = CC_C,
    [INDEX_op_jb_subl] = CC_C,

    [INDEX_op_jz_subb] = CC_Z,
    [INDEX_op_jz_subw] = CC_Z,
    [INDEX_op_jz_subl] = CC_Z,

    [INDEX_op_jbe_subb] = CC_Z | CC_C,
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
    [INDEX_op_jbe_subl] = CC_Z | CC_C,

    [INDEX_op_js_subb] = CC_S,
    [INDEX_op_js_subw] = CC_S,
    [INDEX_op_js_subl] = CC_S,

    [INDEX_op_jl_subb] = CC_O | CC_S,
    [INDEX_op_jl_subw] = CC_O | CC_S,
    [INDEX_op_jl_subl] = CC_O | CC_S,

    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzw] = CC_Z,
    [INDEX_op_loopnzl] = CC_Z,
    [INDEX_op_loopzw] = CC_Z,
    [INDEX_op_loopzl] = CC_Z,

    [INDEX_op_seto_T0_cc] = CC_O,
    [INDEX_op_setb_T0_cc] = CC_C,
    [INDEX_op_setz_T0_cc] = CC_Z,
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
    [INDEX_op_sets_T0_cc] = CC_S,
    [INDEX_op_setp_T0_cc] = CC_P,
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

    [INDEX_op_setb_T0_subb] = CC_C,
    [INDEX_op_setb_T0_subw] = CC_C,
    [INDEX_op_setb_T0_subl] = CC_C,

    [INDEX_op_setz_T0_subb] = CC_Z,
    [INDEX_op_setz_T0_subw] = CC_Z,
    [INDEX_op_setz_T0_subl] = CC_Z,

    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

    [INDEX_op_sets_T0_subb] = CC_S,
    [INDEX_op_sets_T0_subw] = CC_S,
    [INDEX_op_sets_T0_subl] = CC_S,

    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,

    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
    [INDEX_op_cmc] = CC_C,
    [INDEX_op_salc] = CC_C,

    /* needed for correct flag optimisation before string ops */
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
    [INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
    [INDEX_op_jb_subq] = CC_C,
    [INDEX_op_jz_subq] = CC_Z,
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
    [INDEX_op_js_subq] = CC_S,
    [INDEX_op_jl_subq] = CC_O | CC_S,
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzq] = CC_Z,
    [INDEX_op_loopzq] = CC_Z,

    [INDEX_op_setb_T0_subq] = CC_C,
    [INDEX_op_setz_T0_subq] = CC_Z,
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
    [INDEX_op_sets_T0_subq] = CC_S,
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
#endif

#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)
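    /* the empty suffix is the register form; the _raw, _kernel and _user
       variants are the memory write-back forms, one per access mode
       (selected at translation time via mem_index) */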

    DEF_READF( )
    DEF_READF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_READF(_kernel)
    DEF_READF(_user)
#endif
};

/* flags written by an operation */
static uint16_t opc_write_flags[NB_OPS] = {
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    /* bcd */
    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,

#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)


    DEF_WRITEF( )
    DEF_WRITEF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_WRITEF(_kernel)
    DEF_WRITEF(_user)
#endif
};

/* simpler form of an operation if no flags need to be generated */
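/* e.g. the flag-setting shifts can be demoted to their plain forms when
   a later op overwrites all flags before they are read; entries left at
   zero get the identity mapping from optimize_flags_init() */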
static uint16_t opc_simpler[NB_OPS] = {
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
#if 0
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
#endif

    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)

#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};

void optimize_flags_init(void)
{
    int i;
    /* put default values in arrays */
    for(i = 0; i < NB_OPS; i++) {
        if (opc_simpler[i] == 0)
            opc_simpler[i] = i;
    }
}

/* CPU flags computation optimization: we move backward through the
   generated code to see which flags are needed. The operation is
   replaced by a simpler one if the flags it would compute are not
   used. */
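/* Illustrative example (not actual generated code): if two ALU ops each
   emit update1_cc back to back, only the flags from the second can ever
   be observed, so the first update1_cc is rewritten to a nop, while the
   last flag write in a block is always kept because all flags are
   considered live at the end of the block. */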
static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
{
    uint16_t *opc_ptr;
    int live_flags, write_flags, op;

    opc_ptr = opc_buf + opc_buf_len;
    /* live_flags contains the flags needed by the following
       instructions in the code. At the end of the block, we consider
       that all the flags are live. */
    live_flags = CC_OSZAPC;
    while (opc_ptr > opc_buf) {
        op = *--opc_ptr;
        /* if none of the flags written by the instruction is used,
           then we can try to find a simpler instruction */
        write_flags = opc_write_flags[op];
        if ((live_flags & write_flags) == 0) {
            *opc_ptr = opc_simpler[op];
        }
        /* compute the live flags before the instruction */
        live_flags &= ~write_flags;
        live_flags |= opc_read_flags[op];
    }
}

/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int flags, j, lj, cflags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
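    /* mem_index selects among the _raw/_kernel/_user access op variants
       (0 when there is no softmmu, 1*4 for kernel, 2*4 for user); the
       factor of 4 leaves room for the four operand sizes, and the
       128-bit helpers (e.g. gen_ldo_env_A0 above) index with
       mem_index >> 2 */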
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    gen_opc_ptr = gen_opc_buf;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    gen_opparam_ptr = gen_opparam_buf;
    nb_gen_labels = 0;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;

    for(;;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
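        /* when search_pc is set, record for each generated op index the
           guest PC and cc_op of the instruction it belongs to, so that a
           position in the op stream can later be mapped back to a guest
           instruction */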
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* in single step mode, we generate only one instruction and then
           generate an exception */
        /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if the translation gets too long, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* don't forget to fill in the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
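        /* disas_flags for target_disas: 0 = 32 bit code, 1 = 16 bit code,
           2 = 64 bit code (selected below) */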
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP) {
            fprintf(logfile, "OP:\n");
            dump_ops(gen_opc_buf, gen_opparam_buf);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP_OPT) {
        fprintf(logfile, "AFTER FLAGS OPT:\n");
        dump_ops(gen_opc_buf, gen_opparam_buf);
        fprintf(logfile, "\n");
    }
#endif
    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}

int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 0);
}

int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 1);
}