target-i386/translate.c @ aba9d61e


1
/*
2
 *  i386 translation
3
 * 
4
 *  Copyright (c) 2003 Fabrice Bellard
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, write to the Free Software
18
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
 */
20
#include <stdarg.h>
21
#include <stdlib.h>
22
#include <stdio.h>
23
#include <string.h>
24
#include <inttypes.h>
25
#include <signal.h>
26
#include <assert.h>
27

    
28
#include "cpu.h"
29
#include "exec-all.h"
30
#include "disas.h"
31

    
32
/* XXX: move that elsewhere */
33
static uint16_t *gen_opc_ptr;
34
static uint32_t *gen_opparam_ptr;
35

    
36
#define PREFIX_REPZ   0x01
37
#define PREFIX_REPNZ  0x02
38
#define PREFIX_LOCK   0x04
39
#define PREFIX_DATA   0x08
40
#define PREFIX_ADR    0x10
41

    
42
#ifdef TARGET_X86_64
43
#define X86_64_ONLY(x) x
44
#define X86_64_DEF(x...) x
45
#define CODE64(s) ((s)->code64)
46
#define REX_X(s) ((s)->rex_x)
47
#define REX_B(s) ((s)->rex_b)
48
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
49
#if 1
50
#define BUGGY_64(x) NULL
51
#endif
52
#else
53
#define X86_64_ONLY(x) NULL
54
#define X86_64_DEF(x...)
55
#define CODE64(s) 0
56
#define REX_X(s) 0
57
#define REX_B(s) 0
58
#endif
59

    
60
#ifdef TARGET_X86_64
61
static int x86_64_hregs;
62
#endif
63

    
64
#ifdef USE_DIRECT_JUMP
65
#define TBPARAM(x)
66
#else
67
#define TBPARAM(x) (long)(x)
68
#endif
69

    
70
typedef struct DisasContext {
71
    /* current insn context */
72
    int override; /* -1 if no override */
73
    int prefix;
74
    int aflag, dflag;
75
    target_ulong pc; /* pc = eip + cs_base */
76
    int is_jmp; /* 1 means jump (stop translation), 2 means CPU
77
                   static state change (stop translation) */
78
    /* current block context */
79
    target_ulong cs_base; /* base of CS segment */
80
    int pe;     /* protected mode */
81
    int code32; /* 32 bit code segment */
82
#ifdef TARGET_X86_64
83
    int lma;    /* long mode active */
84
    int code64; /* 64 bit code segment */
85
    int rex_x, rex_b;
86
#endif
87
    int ss32;   /* 32 bit stack segment */
88
    int cc_op;  /* current CC operation */
89
    int addseg; /* non-zero if any of DS, ES or SS has a non-zero base */
90
    int f_st;   /* currently unused */
91
    int vm86;   /* vm86 mode */
92
    int cpl;
93
    int iopl;
94
    int tf;     /* TF cpu flag */
95
    int singlestep_enabled; /* "hardware" single step enabled */
96
    int jmp_opt; /* use direct block chaining for direct jumps */
97
    int mem_index; /* select memory access functions */
98
    int flags; /* all execution flags */
99
    struct TranslationBlock *tb;
100
    int popl_esp_hack; /* for correct popl with esp base handling */
101
    int rip_offset; /* only used in x86_64, but left for simplicity */
102
    int cpuid_features;
103
} DisasContext;
104

    
105
static void gen_eob(DisasContext *s);
106
static void gen_jmp(DisasContext *s, target_ulong eip);
107
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
108

    
109
/* i386 arith/logic operations */
110
enum {
111
    OP_ADDL, 
112
    OP_ORL, 
113
    OP_ADCL, 
114
    OP_SBBL,
115
    OP_ANDL, 
116
    OP_SUBL, 
117
    OP_XORL, 
118
    OP_CMPL,
119
};
120

    
121
/* i386 shift ops */
122
enum {
123
    OP_ROL, 
124
    OP_ROR, 
125
    OP_RCL, 
126
    OP_RCR, 
127
    OP_SHL, 
128
    OP_SHR, 
129
    OP_SHL1, /* undocumented */
130
    OP_SAR = 7,
131
};
132

    
133
enum {
134
#define DEF(s, n, copy_size) INDEX_op_ ## s,
135
#include "opc.h"
136
#undef DEF
137
    NB_OPS,
138
};
139

    
140
#include "gen-op.h"
141

    
142
/* operand size */
143
enum {
144
    OT_BYTE = 0,
145
    OT_WORD,
146
    OT_LONG, 
147
    OT_QUAD,
148
};
149

    
150
enum {
151
    /* I386 int registers */
152
    OR_EAX,   /* MUST be even numbered */
153
    OR_ECX,
154
    OR_EDX,
155
    OR_EBX,
156
    OR_ESP,
157
    OR_EBP,
158
    OR_ESI,
159
    OR_EDI,
160

    
161
    OR_TMP0 = 16,    /* temporary operand register */
162
    OR_TMP1,
163
    OR_A0, /* temporary register used when doing address evaluation */
164
};
165

    
166
#ifdef TARGET_X86_64
167

    
168
#define NB_OP_SIZES 4
169

    
170
#define DEF_REGS(prefix, suffix) \
171
  prefix ## EAX ## suffix,\
172
  prefix ## ECX ## suffix,\
173
  prefix ## EDX ## suffix,\
174
  prefix ## EBX ## suffix,\
175
  prefix ## ESP ## suffix,\
176
  prefix ## EBP ## suffix,\
177
  prefix ## ESI ## suffix,\
178
  prefix ## EDI ## suffix,\
179
  prefix ## R8 ## suffix,\
180
  prefix ## R9 ## suffix,\
181
  prefix ## R10 ## suffix,\
182
  prefix ## R11 ## suffix,\
183
  prefix ## R12 ## suffix,\
184
  prefix ## R13 ## suffix,\
185
  prefix ## R14 ## suffix,\
186
  prefix ## R15 ## suffix,
187

    
188
#define DEF_BREGS(prefixb, prefixh, suffix)             \
189
                                                        \
190
static void prefixb ## ESP ## suffix ## _wrapper(void)  \
191
{                                                       \
192
    if (x86_64_hregs)                                 \
193
        prefixb ## ESP ## suffix ();                    \
194
    else                                                \
195
        prefixh ## EAX ## suffix ();                    \
196
}                                                       \
197
                                                        \
198
static void prefixb ## EBP ## suffix ## _wrapper(void)  \
199
{                                                       \
200
    if (x86_64_hregs)                                 \
201
        prefixb ## EBP ## suffix ();                    \
202
    else                                                \
203
        prefixh ## ECX ## suffix ();                    \
204
}                                                       \
205
                                                        \
206
static void prefixb ## ESI ## suffix ## _wrapper(void)  \
207
{                                                       \
208
    if (x86_64_hregs)                                 \
209
        prefixb ## ESI ## suffix ();                    \
210
    else                                                \
211
        prefixh ## EDX ## suffix ();                    \
212
}                                                       \
213
                                                        \
214
static void prefixb ## EDI ## suffix ## _wrapper(void)  \
215
{                                                       \
216
    if (x86_64_hregs)                                 \
217
        prefixb ## EDI ## suffix ();                    \
218
    else                                                \
219
        prefixh ## EBX ## suffix ();                    \
220
}
221

    
222
DEF_BREGS(gen_op_movb_, gen_op_movh_, _T0)
223
DEF_BREGS(gen_op_movb_, gen_op_movh_, _T1)
224
DEF_BREGS(gen_op_movl_T0_, gen_op_movh_T0_, )
225
DEF_BREGS(gen_op_movl_T1_, gen_op_movh_T1_, )
226

    
227
#else /* !TARGET_X86_64 */
228

    
229
#define NB_OP_SIZES 3
230

    
231
#define DEF_REGS(prefix, suffix) \
232
  prefix ## EAX ## suffix,\
233
  prefix ## ECX ## suffix,\
234
  prefix ## EDX ## suffix,\
235
  prefix ## EBX ## suffix,\
236
  prefix ## ESP ## suffix,\
237
  prefix ## EBP ## suffix,\
238
  prefix ## ESI ## suffix,\
239
  prefix ## EDI ## suffix,
240

    
241
#endif /* !TARGET_X86_64 */
242

    
243
static GenOpFunc *gen_op_mov_reg_T0[NB_OP_SIZES][CPU_NB_REGS] = {
244
    [OT_BYTE] = {
245
        gen_op_movb_EAX_T0,
246
        gen_op_movb_ECX_T0,
247
        gen_op_movb_EDX_T0,
248
        gen_op_movb_EBX_T0,
249
#ifdef TARGET_X86_64
250
        gen_op_movb_ESP_T0_wrapper,
251
        gen_op_movb_EBP_T0_wrapper,
252
        gen_op_movb_ESI_T0_wrapper,
253
        gen_op_movb_EDI_T0_wrapper,
254
        gen_op_movb_R8_T0,
255
        gen_op_movb_R9_T0,
256
        gen_op_movb_R10_T0,
257
        gen_op_movb_R11_T0,
258
        gen_op_movb_R12_T0,
259
        gen_op_movb_R13_T0,
260
        gen_op_movb_R14_T0,
261
        gen_op_movb_R15_T0,
262
#else
263
        gen_op_movh_EAX_T0,
264
        gen_op_movh_ECX_T0,
265
        gen_op_movh_EDX_T0,
266
        gen_op_movh_EBX_T0,
267
#endif
268
    },
269
    [OT_WORD] = {
270
        DEF_REGS(gen_op_movw_, _T0)
271
    },
272
    [OT_LONG] = {
273
        DEF_REGS(gen_op_movl_, _T0)
274
    },
275
#ifdef TARGET_X86_64
276
    [OT_QUAD] = {
277
        DEF_REGS(gen_op_movq_, _T0)
278
    },
279
#endif
280
};
281

    
282
static GenOpFunc *gen_op_mov_reg_T1[NB_OP_SIZES][CPU_NB_REGS] = {
283
    [OT_BYTE] = {
284
        gen_op_movb_EAX_T1,
285
        gen_op_movb_ECX_T1,
286
        gen_op_movb_EDX_T1,
287
        gen_op_movb_EBX_T1,
288
#ifdef TARGET_X86_64
289
        gen_op_movb_ESP_T1_wrapper,
290
        gen_op_movb_EBP_T1_wrapper,
291
        gen_op_movb_ESI_T1_wrapper,
292
        gen_op_movb_EDI_T1_wrapper,
293
        gen_op_movb_R8_T1,
294
        gen_op_movb_R9_T1,
295
        gen_op_movb_R10_T1,
296
        gen_op_movb_R11_T1,
297
        gen_op_movb_R12_T1,
298
        gen_op_movb_R13_T1,
299
        gen_op_movb_R14_T1,
300
        gen_op_movb_R15_T1,
301
#else
302
        gen_op_movh_EAX_T1,
303
        gen_op_movh_ECX_T1,
304
        gen_op_movh_EDX_T1,
305
        gen_op_movh_EBX_T1,
306
#endif
307
    },
308
    [OT_WORD] = {
309
        DEF_REGS(gen_op_movw_, _T1)
310
    },
311
    [OT_LONG] = {
312
        DEF_REGS(gen_op_movl_, _T1)
313
    },
314
#ifdef TARGET_X86_64
315
    [OT_QUAD] = {
316
        DEF_REGS(gen_op_movq_, _T1)
317
    },
318
#endif
319
};
320

    
321
static GenOpFunc *gen_op_mov_reg_A0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
322
    [0] = {
323
        DEF_REGS(gen_op_movw_, _A0)
324
    },
325
    [1] = {
326
        DEF_REGS(gen_op_movl_, _A0)
327
    },
328
#ifdef TARGET_X86_64
329
    [2] = {
330
        DEF_REGS(gen_op_movq_, _A0)
331
    },
332
#endif
333
};
334

    
335
static GenOpFunc *gen_op_mov_TN_reg[NB_OP_SIZES][2][CPU_NB_REGS] = 
336
{
337
    [OT_BYTE] = {
338
        {
339
            gen_op_movl_T0_EAX,
340
            gen_op_movl_T0_ECX,
341
            gen_op_movl_T0_EDX,
342
            gen_op_movl_T0_EBX,
343
#ifdef TARGET_X86_64
344
            gen_op_movl_T0_ESP_wrapper,
345
            gen_op_movl_T0_EBP_wrapper,
346
            gen_op_movl_T0_ESI_wrapper,
347
            gen_op_movl_T0_EDI_wrapper,
348
            gen_op_movl_T0_R8,
349
            gen_op_movl_T0_R9,
350
            gen_op_movl_T0_R10,
351
            gen_op_movl_T0_R11,
352
            gen_op_movl_T0_R12,
353
            gen_op_movl_T0_R13,
354
            gen_op_movl_T0_R14,
355
            gen_op_movl_T0_R15,
356
#else
357
            gen_op_movh_T0_EAX,
358
            gen_op_movh_T0_ECX,
359
            gen_op_movh_T0_EDX,
360
            gen_op_movh_T0_EBX,
361
#endif
362
        },
363
        {
364
            gen_op_movl_T1_EAX,
365
            gen_op_movl_T1_ECX,
366
            gen_op_movl_T1_EDX,
367
            gen_op_movl_T1_EBX,
368
#ifdef TARGET_X86_64
369
            gen_op_movl_T1_ESP_wrapper,
370
            gen_op_movl_T1_EBP_wrapper,
371
            gen_op_movl_T1_ESI_wrapper,
372
            gen_op_movl_T1_EDI_wrapper,
373
            gen_op_movl_T1_R8,
374
            gen_op_movl_T1_R9,
375
            gen_op_movl_T1_R10,
376
            gen_op_movl_T1_R11,
377
            gen_op_movl_T1_R12,
378
            gen_op_movl_T1_R13,
379
            gen_op_movl_T1_R14,
380
            gen_op_movl_T1_R15,
381
#else
382
            gen_op_movh_T1_EAX,
383
            gen_op_movh_T1_ECX,
384
            gen_op_movh_T1_EDX,
385
            gen_op_movh_T1_EBX,
386
#endif
387
        },
388
    },
389
    [OT_WORD] = {
390
        {
391
            DEF_REGS(gen_op_movl_T0_, )
392
        },
393
        {
394
            DEF_REGS(gen_op_movl_T1_, )
395
        },
396
    },
397
    [OT_LONG] = {
398
        {
399
            DEF_REGS(gen_op_movl_T0_, )
400
        },
401
        {
402
            DEF_REGS(gen_op_movl_T1_, )
403
        },
404
    },
405
#ifdef TARGET_X86_64
406
    [OT_QUAD] = {
407
        {
408
            DEF_REGS(gen_op_movl_T0_, )
409
        },
410
        {
411
            DEF_REGS(gen_op_movl_T1_, )
412
        },
413
    },
414
#endif
415
};
416

    
417
static GenOpFunc *gen_op_movl_A0_reg[CPU_NB_REGS] = {
418
    DEF_REGS(gen_op_movl_A0_, )
419
};
420

    
421
static GenOpFunc *gen_op_addl_A0_reg_sN[4][CPU_NB_REGS] = {
422
    [0] = {
423
        DEF_REGS(gen_op_addl_A0_, )
424
    },
425
    [1] = {
426
        DEF_REGS(gen_op_addl_A0_, _s1)
427
    },
428
    [2] = {
429
        DEF_REGS(gen_op_addl_A0_, _s2)
430
    },
431
    [3] = {
432
        DEF_REGS(gen_op_addl_A0_, _s3)
433
    },
434
};
435

    
436
#ifdef TARGET_X86_64
437
static GenOpFunc *gen_op_movq_A0_reg[CPU_NB_REGS] = {
438
    DEF_REGS(gen_op_movq_A0_, )
439
};
440

    
441
static GenOpFunc *gen_op_addq_A0_reg_sN[4][CPU_NB_REGS] = {
442
    [0] = {
443
        DEF_REGS(gen_op_addq_A0_, )
444
    },
445
    [1] = {
446
        DEF_REGS(gen_op_addq_A0_, _s1)
447
    },
448
    [2] = {
449
        DEF_REGS(gen_op_addq_A0_, _s2)
450
    },
451
    [3] = {
452
        DEF_REGS(gen_op_addq_A0_, _s3)
453
    },
454
};
455
#endif
456

    
457
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
458
    [0] = {
459
        DEF_REGS(gen_op_cmovw_, _T1_T0)
460
    },
461
    [1] = {
462
        DEF_REGS(gen_op_cmovl_, _T1_T0)
463
    },
464
#ifdef TARGET_X86_64
465
    [2] = {
466
        DEF_REGS(gen_op_cmovq_, _T1_T0)
467
    },
468
#endif
469
};
470

    
471
static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
472
    NULL,
473
    gen_op_orl_T0_T1,
474
    NULL,
475
    NULL,
476
    gen_op_andl_T0_T1,
477
    NULL,
478
    gen_op_xorl_T0_T1,
479
    NULL,
480
};
481

    
482
#define DEF_ARITHC(SUFFIX)\
483
    {\
484
        gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
485
        gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
486
    },\
487
    {\
488
        gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
489
        gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
490
    },\
491
    {\
492
        gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
493
        gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
494
    },\
495
    {\
496
        X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
497
        X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
498
    },
499

    
500
static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
501
    DEF_ARITHC( )
502
};
503

    
504
static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
505
    DEF_ARITHC(_raw)
506
#ifndef CONFIG_USER_ONLY
507
    DEF_ARITHC(_kernel)
508
    DEF_ARITHC(_user)
509
#endif
510
};
511

    
512
static const int cc_op_arithb[8] = {
513
    CC_OP_ADDB,
514
    CC_OP_LOGICB,
515
    CC_OP_ADDB,
516
    CC_OP_SUBB,
517
    CC_OP_LOGICB,
518
    CC_OP_SUBB,
519
    CC_OP_LOGICB,
520
    CC_OP_SUBB,
521
};
522

    
523
#define DEF_CMPXCHG(SUFFIX)\
524
    gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
525
    gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
526
    gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
527
    X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
528

    
529
static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
530
    DEF_CMPXCHG( )
531
};
532

    
533
static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
534
    DEF_CMPXCHG(_raw)
535
#ifndef CONFIG_USER_ONLY
536
    DEF_CMPXCHG(_kernel)
537
    DEF_CMPXCHG(_user)
538
#endif
539
};
540

    
541
#define DEF_SHIFT(SUFFIX)\
542
    {\
543
        gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
544
        gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
545
        gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
546
        gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
547
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
548
        gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
549
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
550
        gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
551
    },\
552
    {\
553
        gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
554
        gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
555
        gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
556
        gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
557
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
558
        gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
559
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
560
        gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
561
    },\
562
    {\
563
        gen_op_roll ## SUFFIX ## _T0_T1_cc,\
564
        gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
565
        gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
566
        gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
567
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
568
        gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
569
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
570
        gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
571
    },\
572
    {\
573
        X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
574
        X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
575
        X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
576
        X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
577
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
578
        X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
579
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
580
        X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
581
    },
582

    
583
static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
584
    DEF_SHIFT( )
585
};
586

    
587
static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
588
    DEF_SHIFT(_raw)
589
#ifndef CONFIG_USER_ONLY
590
    DEF_SHIFT(_kernel)
591
    DEF_SHIFT(_user)
592
#endif
593
};
594

    
595
#define DEF_SHIFTD(SUFFIX, op)\
596
    {\
597
        NULL,\
598
        NULL,\
599
    },\
600
    {\
601
        gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
602
        gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
603
     },\
604
    {\
605
        gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
606
        gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
607
    },\
608
    {\
609
X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
610
           gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
611
    },
612

    
613
static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
614
    DEF_SHIFTD(, im)
615
};
616

    
617
static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
618
    DEF_SHIFTD(, ECX)
619
};
620

    
621
static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
622
    DEF_SHIFTD(_raw, im)
623
#ifndef CONFIG_USER_ONLY
624
    DEF_SHIFTD(_kernel, im)
625
    DEF_SHIFTD(_user, im)
626
#endif
627
};
628

    
629
static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
630
    DEF_SHIFTD(_raw, ECX)
631
#ifndef CONFIG_USER_ONLY
632
    DEF_SHIFTD(_kernel, ECX)
633
    DEF_SHIFTD(_user, ECX)
634
#endif
635
};
636

    
637
static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
638
    [0] = {
639
        gen_op_btw_T0_T1_cc,
640
        gen_op_btsw_T0_T1_cc,
641
        gen_op_btrw_T0_T1_cc,
642
        gen_op_btcw_T0_T1_cc,
643
    },
644
    [1] = {
645
        gen_op_btl_T0_T1_cc,
646
        gen_op_btsl_T0_T1_cc,
647
        gen_op_btrl_T0_T1_cc,
648
        gen_op_btcl_T0_T1_cc,
649
    },
650
#ifdef TARGET_X86_64
651
    [2] = {
652
        gen_op_btq_T0_T1_cc,
653
        gen_op_btsq_T0_T1_cc,
654
        gen_op_btrq_T0_T1_cc,
655
        gen_op_btcq_T0_T1_cc,
656
    },
657
#endif
658
};
659

    
660
static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
661
    gen_op_add_bitw_A0_T1,
662
    gen_op_add_bitl_A0_T1,
663
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
664
};
665

    
666
static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
667
    [0] = {
668
        gen_op_bsfw_T0_cc,
669
        gen_op_bsrw_T0_cc,
670
    },
671
    [1] = {
672
        gen_op_bsfl_T0_cc,
673
        gen_op_bsrl_T0_cc,
674
    },
675
#ifdef TARGET_X86_64
676
    [2] = {
677
        gen_op_bsfq_T0_cc,
678
        gen_op_bsrq_T0_cc,
679
    },
680
#endif
681
};
682

    
683
static GenOpFunc *gen_op_lds_T0_A0[3 * 4] = {
684
    gen_op_ldsb_raw_T0_A0,
685
    gen_op_ldsw_raw_T0_A0,
686
    X86_64_ONLY(gen_op_ldsl_raw_T0_A0),
687
    NULL,
688
#ifndef CONFIG_USER_ONLY
689
    gen_op_ldsb_kernel_T0_A0,
690
    gen_op_ldsw_kernel_T0_A0,
691
    X86_64_ONLY(gen_op_ldsl_kernel_T0_A0),
692
    NULL,
693

    
694
    gen_op_ldsb_user_T0_A0,
695
    gen_op_ldsw_user_T0_A0,
696
    X86_64_ONLY(gen_op_ldsl_user_T0_A0),
697
    NULL,
698
#endif
699
};
700

    
701
static GenOpFunc *gen_op_ldu_T0_A0[3 * 4] = {
702
    gen_op_ldub_raw_T0_A0,
703
    gen_op_lduw_raw_T0_A0,
704
    NULL,
705
    NULL,
706

    
707
#ifndef CONFIG_USER_ONLY
708
    gen_op_ldub_kernel_T0_A0,
709
    gen_op_lduw_kernel_T0_A0,
710
    NULL,
711
    NULL,
712

    
713
    gen_op_ldub_user_T0_A0,
714
    gen_op_lduw_user_T0_A0,
715
    NULL,
716
    NULL,
717
#endif
718
};
719

    
720
/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
721
static GenOpFunc *gen_op_ld_T0_A0[3 * 4] = {
722
    gen_op_ldub_raw_T0_A0,
723
    gen_op_lduw_raw_T0_A0,
724
    gen_op_ldl_raw_T0_A0,
725
    X86_64_ONLY(gen_op_ldq_raw_T0_A0),
726

    
727
#ifndef CONFIG_USER_ONLY
728
    gen_op_ldub_kernel_T0_A0,
729
    gen_op_lduw_kernel_T0_A0,
730
    gen_op_ldl_kernel_T0_A0,
731
    X86_64_ONLY(gen_op_ldq_kernel_T0_A0),
732

    
733
    gen_op_ldub_user_T0_A0,
734
    gen_op_lduw_user_T0_A0,
735
    gen_op_ldl_user_T0_A0,
736
    X86_64_ONLY(gen_op_ldq_user_T0_A0),
737
#endif
738
};
739

    
740
static GenOpFunc *gen_op_ld_T1_A0[3 * 4] = {
741
    gen_op_ldub_raw_T1_A0,
742
    gen_op_lduw_raw_T1_A0,
743
    gen_op_ldl_raw_T1_A0,
744
    X86_64_ONLY(gen_op_ldq_raw_T1_A0),
745

    
746
#ifndef CONFIG_USER_ONLY
747
    gen_op_ldub_kernel_T1_A0,
748
    gen_op_lduw_kernel_T1_A0,
749
    gen_op_ldl_kernel_T1_A0,
750
    X86_64_ONLY(gen_op_ldq_kernel_T1_A0),
751

    
752
    gen_op_ldub_user_T1_A0,
753
    gen_op_lduw_user_T1_A0,
754
    gen_op_ldl_user_T1_A0,
755
    X86_64_ONLY(gen_op_ldq_user_T1_A0),
756
#endif
757
};
758

    
759
static GenOpFunc *gen_op_st_T0_A0[3 * 4] = {
760
    gen_op_stb_raw_T0_A0,
761
    gen_op_stw_raw_T0_A0,
762
    gen_op_stl_raw_T0_A0,
763
    X86_64_ONLY(gen_op_stq_raw_T0_A0),
764

    
765
#ifndef CONFIG_USER_ONLY
766
    gen_op_stb_kernel_T0_A0,
767
    gen_op_stw_kernel_T0_A0,
768
    gen_op_stl_kernel_T0_A0,
769
    X86_64_ONLY(gen_op_stq_kernel_T0_A0),
770

    
771
    gen_op_stb_user_T0_A0,
772
    gen_op_stw_user_T0_A0,
773
    gen_op_stl_user_T0_A0,
774
    X86_64_ONLY(gen_op_stq_user_T0_A0),
775
#endif
776
};
777

    
778
static GenOpFunc *gen_op_st_T1_A0[3 * 4] = {
779
    NULL,
780
    gen_op_stw_raw_T1_A0,
781
    gen_op_stl_raw_T1_A0,
782
    X86_64_ONLY(gen_op_stq_raw_T1_A0),
783

    
784
#ifndef CONFIG_USER_ONLY
785
    NULL,
786
    gen_op_stw_kernel_T1_A0,
787
    gen_op_stl_kernel_T1_A0,
788
    X86_64_ONLY(gen_op_stq_kernel_T1_A0),
789

    
790
    NULL,
791
    gen_op_stw_user_T1_A0,
792
    gen_op_stl_user_T1_A0,
793
    X86_64_ONLY(gen_op_stq_user_T1_A0),
794
#endif
795
};
796

    
797
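/* set EIP to the immediate 'pc', using the shortest form available
   (32 bit, sign-extended 32 bit or full 64 bit on x86_64) */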
static inline void gen_jmp_im(target_ulong pc)
798
{
799
#ifdef TARGET_X86_64
800
    if (pc == (uint32_t)pc) {
801
        gen_op_movl_eip_im(pc);
802
    } else if (pc == (int32_t)pc) {
803
        gen_op_movq_eip_im(pc);
804
    } else {
805
        gen_op_movq_eip_im64(pc >> 32, pc);
806
    }
807
#else
808
    gen_op_movl_eip_im(pc);
809
#endif
810
}
811

    
812
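/* compute the string source address seg:(E)SI into A0, honoring segment
   overrides and the current address size */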
static inline void gen_string_movl_A0_ESI(DisasContext *s)
813
{
814
    int override;
815

    
816
    override = s->override;
817
#ifdef TARGET_X86_64
818
    if (s->aflag == 2) {
819
        if (override >= 0) {
820
            gen_op_movq_A0_seg(offsetof(CPUX86State,segs[override].base));
821
            gen_op_addq_A0_reg_sN[0][R_ESI]();
822
        } else {
823
            gen_op_movq_A0_reg[R_ESI]();
824
        }
825
    } else
826
#endif
827
    if (s->aflag) {
828
        /* 32 bit address */
829
        if (s->addseg && override < 0)
830
            override = R_DS;
831
        if (override >= 0) {
832
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
833
            gen_op_addl_A0_reg_sN[0][R_ESI]();
834
        } else {
835
            gen_op_movl_A0_reg[R_ESI]();
836
        }
837
    } else {
838
        /* 16 address, always override */
839
        if (override < 0)
840
            override = R_DS;
841
        gen_op_movl_A0_reg[R_ESI]();
842
        gen_op_andl_A0_ffff();
843
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
844
    }
845
}
846

    
847
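/* compute the string destination address ES:(E)DI into A0 */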
static inline void gen_string_movl_A0_EDI(DisasContext *s)
848
{
849
#ifdef TARGET_X86_64
850
    if (s->aflag == 2) {
851
        gen_op_movq_A0_reg[R_EDI]();
852
    } else
853
#endif
854
    if (s->aflag) {
855
        if (s->addseg) {
856
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
857
            gen_op_addl_A0_reg_sN[0][R_EDI]();
858
        } else {
859
            gen_op_movl_A0_reg[R_EDI]();
860
        }
861
    } else {
862
        gen_op_movl_A0_reg[R_EDI]();
863
        gen_op_andl_A0_ffff();
864
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
865
    }
866
}
867

    
868
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
869
    gen_op_movl_T0_Dshiftb,
870
    gen_op_movl_T0_Dshiftw,
871
    gen_op_movl_T0_Dshiftl,
872
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
873
};
874

    
875
static GenOpFunc1 *gen_op_jnz_ecx[3] = {
876
    gen_op_jnz_ecxw,
877
    gen_op_jnz_ecxl,
878
    X86_64_ONLY(gen_op_jnz_ecxq),
879
};
880
    
881
static GenOpFunc1 *gen_op_jz_ecx[3] = {
882
    gen_op_jz_ecxw,
883
    gen_op_jz_ecxl,
884
    X86_64_ONLY(gen_op_jz_ecxq),
885
};
886

    
887
static GenOpFunc *gen_op_dec_ECX[3] = {
888
    gen_op_decw_ECX,
889
    gen_op_decl_ECX,
890
    X86_64_ONLY(gen_op_decq_ECX),
891
};
892

    
893
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
894
    {
895
        gen_op_jnz_subb,
896
        gen_op_jnz_subw,
897
        gen_op_jnz_subl,
898
        X86_64_ONLY(gen_op_jnz_subq),
899
    },
900
    {
901
        gen_op_jz_subb,
902
        gen_op_jz_subw,
903
        gen_op_jz_subl,
904
        X86_64_ONLY(gen_op_jz_subq),
905
    },
906
};
907

    
908
static GenOpFunc *gen_op_in_DX_T0[3] = {
909
    gen_op_inb_DX_T0,
910
    gen_op_inw_DX_T0,
911
    gen_op_inl_DX_T0,
912
};
913

    
914
static GenOpFunc *gen_op_out_DX_T0[3] = {
915
    gen_op_outb_DX_T0,
916
    gen_op_outw_DX_T0,
917
    gen_op_outl_DX_T0,
918
};
919

    
920
static GenOpFunc *gen_op_in[3] = {
921
    gen_op_inb_T0_T1,
922
    gen_op_inw_T0_T1,
923
    gen_op_inl_T0_T1,
924
};
925

    
926
static GenOpFunc *gen_op_out[3] = {
927
    gen_op_outb_T0_T1,
928
    gen_op_outw_T0_T1,
929
    gen_op_outl_T0_T1,
930
};
931

    
932
static GenOpFunc *gen_check_io_T0[3] = {
933
    gen_op_check_iob_T0,
934
    gen_op_check_iow_T0,
935
    gen_op_check_iol_T0,
936
};
937

    
938
static GenOpFunc *gen_check_io_DX[3] = {
939
    gen_op_check_iob_DX,
940
    gen_op_check_iow_DX,
941
    gen_op_check_iol_DX,
942
};
943

    
944
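/* generate a check that the I/O port (taken from DX or T0) is accessible;
   only emitted in protected mode when CPL > IOPL or in vm86 mode */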
static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
945
{
946
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
947
        if (s->cc_op != CC_OP_DYNAMIC)
948
            gen_op_set_cc_op(s->cc_op);
949
        gen_jmp_im(cur_eip);
950
        if (use_dx)
951
            gen_check_io_DX[ot]();
952
        else
953
            gen_check_io_T0[ot]();
954
    }
955
}
956

    
957
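/* generate one MOVS step: copy one element from seg:(E)SI to ES:(E)DI and
   advance both index registers according to the direction flag */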
static inline void gen_movs(DisasContext *s, int ot)
958
{
959
    gen_string_movl_A0_ESI(s);
960
    gen_op_ld_T0_A0[ot + s->mem_index]();
961
    gen_string_movl_A0_EDI(s);
962
    gen_op_st_T0_A0[ot + s->mem_index]();
963
    gen_op_movl_T0_Dshift[ot]();
964
#ifdef TARGET_X86_64
965
    if (s->aflag == 2) {
966
        gen_op_addq_ESI_T0();
967
        gen_op_addq_EDI_T0();
968
    } else 
969
#endif
970
    if (s->aflag) {
971
        gen_op_addl_ESI_T0();
972
        gen_op_addl_EDI_T0();
973
    } else {
974
        gen_op_addw_ESI_T0();
975
        gen_op_addw_EDI_T0();
976
    }
977
}
978

    
979
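/* write the statically tracked cc_op back to the CPU state and mark it dynamic */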
static inline void gen_update_cc_op(DisasContext *s)
980
{
981
    if (s->cc_op != CC_OP_DYNAMIC) {
982
        gen_op_set_cc_op(s->cc_op);
983
        s->cc_op = CC_OP_DYNAMIC;
984
    }
985
}
986

    
987
/* XXX: does not work with gdbstub "ice" single step - not a
988
   serious problem */
989
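/* if (E)CX is zero, jump to 'next_eip'; returns a label placed just before
   that exit jump so callers can branch back to it */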
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
990
{
991
    int l1, l2;
992

    
993
    l1 = gen_new_label();
994
    l2 = gen_new_label();
995
    gen_op_jnz_ecx[s->aflag](l1);
996
    gen_set_label(l2);
997
    gen_jmp_tb(s, next_eip, 1);
998
    gen_set_label(l1);
999
    return l2;
1000
}
1001

    
1002
static inline void gen_stos(DisasContext *s, int ot)
1003
{
1004
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
1005
    gen_string_movl_A0_EDI(s);
1006
    gen_op_st_T0_A0[ot + s->mem_index]();
1007
    gen_op_movl_T0_Dshift[ot]();
1008
#ifdef TARGET_X86_64
1009
    if (s->aflag == 2) {
1010
        gen_op_addq_EDI_T0();
1011
    } else 
1012
#endif
1013
    if (s->aflag) {
1014
        gen_op_addl_EDI_T0();
1015
    } else {
1016
        gen_op_addw_EDI_T0();
1017
    }
1018
}
1019

    
1020
static inline void gen_lods(DisasContext *s, int ot)
1021
{
1022
    gen_string_movl_A0_ESI(s);
1023
    gen_op_ld_T0_A0[ot + s->mem_index]();
1024
    gen_op_mov_reg_T0[ot][R_EAX]();
1025
    gen_op_movl_T0_Dshift[ot]();
1026
#ifdef TARGET_X86_64
1027
    if (s->aflag == 2) {
1028
        gen_op_addq_ESI_T0();
1029
    } else 
1030
#endif
1031
    if (s->aflag) {
1032
        gen_op_addl_ESI_T0();
1033
    } else {
1034
        gen_op_addw_ESI_T0();
1035
    }
1036
}
1037

    
1038
static inline void gen_scas(DisasContext *s, int ot)
1039
{
1040
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
1041
    gen_string_movl_A0_EDI(s);
1042
    gen_op_ld_T1_A0[ot + s->mem_index]();
1043
    gen_op_cmpl_T0_T1_cc();
1044
    gen_op_movl_T0_Dshift[ot]();
1045
#ifdef TARGET_X86_64
1046
    if (s->aflag == 2) {
1047
        gen_op_addq_EDI_T0();
1048
    } else 
1049
#endif
1050
    if (s->aflag) {
1051
        gen_op_addl_EDI_T0();
1052
    } else {
1053
        gen_op_addw_EDI_T0();
1054
    }
1055
}
1056

    
1057
static inline void gen_cmps(DisasContext *s, int ot)
1058
{
1059
    gen_string_movl_A0_ESI(s);
1060
    gen_op_ld_T0_A0[ot + s->mem_index]();
1061
    gen_string_movl_A0_EDI(s);
1062
    gen_op_ld_T1_A0[ot + s->mem_index]();
1063
    gen_op_cmpl_T0_T1_cc();
1064
    gen_op_movl_T0_Dshift[ot]();
1065
#ifdef TARGET_X86_64
1066
    if (s->aflag == 2) {
1067
        gen_op_addq_ESI_T0();
1068
        gen_op_addq_EDI_T0();
1069
    } else 
1070
#endif
1071
    if (s->aflag) {
1072
        gen_op_addl_ESI_T0();
1073
        gen_op_addl_EDI_T0();
1074
    } else {
1075
        gen_op_addw_ESI_T0();
1076
        gen_op_addw_EDI_T0();
1077
    }
1078
}
1079

    
1080
static inline void gen_ins(DisasContext *s, int ot)
1081
{
1082
    gen_string_movl_A0_EDI(s);
1083
    gen_op_movl_T0_0();
1084
    gen_op_st_T0_A0[ot + s->mem_index]();
1085
    gen_op_in_DX_T0[ot]();
1086
    gen_op_st_T0_A0[ot + s->mem_index]();
1087
    gen_op_movl_T0_Dshift[ot]();
1088
#ifdef TARGET_X86_64
1089
    if (s->aflag == 2) {
1090
        gen_op_addq_EDI_T0();
1091
    } else 
1092
#endif
1093
    if (s->aflag) {
1094
        gen_op_addl_EDI_T0();
1095
    } else {
1096
        gen_op_addw_EDI_T0();
1097
    }
1098
}
1099

    
1100
static inline void gen_outs(DisasContext *s, int ot)
1101
{
1102
    gen_string_movl_A0_ESI(s);
1103
    gen_op_ld_T0_A0[ot + s->mem_index]();
1104
    gen_op_out_DX_T0[ot]();
1105
    gen_op_movl_T0_Dshift[ot]();
1106
#ifdef TARGET_X86_64
1107
    if (s->aflag == 2) {
1108
        gen_op_addq_ESI_T0();
1109
    } else 
1110
#endif
1111
    if (s->aflag) {
1112
        gen_op_addl_ESI_T0();
1113
    } else {
1114
        gen_op_addw_ESI_T0();
1115
    }
1116
}
1117

    
1118
/* same method as Valgrind: we generate jumps to the current or next
1119
   instruction */
1120
#define GEN_REPZ(op)                                                          \
1121
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
1122
                                 target_ulong cur_eip, target_ulong next_eip) \
1123
{                                                                             \
1124
    int l2;\
1125
    gen_update_cc_op(s);                                                      \
1126
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
1127
    gen_ ## op(s, ot);                                                        \
1128
    gen_op_dec_ECX[s->aflag]();                                               \
1129
    /* a loop would cause two single step exceptions if ECX = 1               \
1130
       before rep string_insn */                                              \
1131
    if (!s->jmp_opt)                                                          \
1132
        gen_op_jz_ecx[s->aflag](l2);                                          \
1133
    gen_jmp(s, cur_eip);                                                      \
1134
}
1135

    
1136
#define GEN_REPZ2(op)                                                         \
1137
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
1138
                                   target_ulong cur_eip,                      \
1139
                                   target_ulong next_eip,                     \
1140
                                   int nz)                                    \
1141
{                                                                             \
1142
    int l2;\
1143
    gen_update_cc_op(s);                                                      \
1144
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
1145
    gen_ ## op(s, ot);                                                        \
1146
    gen_op_dec_ECX[s->aflag]();                                               \
1147
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
1148
    gen_op_string_jnz_sub[nz][ot](l2);\
1149
    if (!s->jmp_opt)                                                          \
1150
        gen_op_jz_ecx[s->aflag](l2);                                          \
1151
    gen_jmp(s, cur_eip);                                                      \
1152
}
1153

    
1154
GEN_REPZ(movs)
1155
GEN_REPZ(stos)
1156
GEN_REPZ(lods)
1157
GEN_REPZ(ins)
1158
GEN_REPZ(outs)
1159
GEN_REPZ2(scas)
1160
GEN_REPZ2(cmps)
1161

    
1162
enum {
1163
    JCC_O,
1164
    JCC_B,
1165
    JCC_Z,
1166
    JCC_BE,
1167
    JCC_S,
1168
    JCC_P,
1169
    JCC_L,
1170
    JCC_LE,
1171
};
1172

    
1173
static GenOpFunc1 *gen_jcc_sub[4][8] = {
1174
    [OT_BYTE] = {
1175
        NULL,
1176
        gen_op_jb_subb,
1177
        gen_op_jz_subb,
1178
        gen_op_jbe_subb,
1179
        gen_op_js_subb,
1180
        NULL,
1181
        gen_op_jl_subb,
1182
        gen_op_jle_subb,
1183
    },
1184
    [OT_WORD] = {
1185
        NULL,
1186
        gen_op_jb_subw,
1187
        gen_op_jz_subw,
1188
        gen_op_jbe_subw,
1189
        gen_op_js_subw,
1190
        NULL,
1191
        gen_op_jl_subw,
1192
        gen_op_jle_subw,
1193
    },
1194
    [OT_LONG] = {
1195
        NULL,
1196
        gen_op_jb_subl,
1197
        gen_op_jz_subl,
1198
        gen_op_jbe_subl,
1199
        gen_op_js_subl,
1200
        NULL,
1201
        gen_op_jl_subl,
1202
        gen_op_jle_subl,
1203
    },
1204
#ifdef TARGET_X86_64
1205
    [OT_QUAD] = {
1206
        NULL,
1207
        BUGGY_64(gen_op_jb_subq),
1208
        gen_op_jz_subq,
1209
        BUGGY_64(gen_op_jbe_subq),
1210
        gen_op_js_subq,
1211
        NULL,
1212
        BUGGY_64(gen_op_jl_subq),
1213
        BUGGY_64(gen_op_jle_subq),
1214
    },
1215
#endif
1216
};
1217
static GenOpFunc1 *gen_op_loop[3][4] = {
1218
    [0] = {
1219
        gen_op_loopnzw,
1220
        gen_op_loopzw,
1221
        gen_op_jnz_ecxw,
1222
    },
1223
    [1] = {
1224
        gen_op_loopnzl,
1225
        gen_op_loopzl,
1226
        gen_op_jnz_ecxl,
1227
    },
1228
#ifdef TARGET_X86_64
1229
    [2] = {
1230
        gen_op_loopnzq,
1231
        gen_op_loopzq,
1232
        gen_op_jnz_ecxq,
1233
    },
1234
#endif
1235
};
1236

    
1237
static GenOpFunc *gen_setcc_slow[8] = {
1238
    gen_op_seto_T0_cc,
1239
    gen_op_setb_T0_cc,
1240
    gen_op_setz_T0_cc,
1241
    gen_op_setbe_T0_cc,
1242
    gen_op_sets_T0_cc,
1243
    gen_op_setp_T0_cc,
1244
    gen_op_setl_T0_cc,
1245
    gen_op_setle_T0_cc,
1246
};
1247

    
1248
static GenOpFunc *gen_setcc_sub[4][8] = {
1249
    [OT_BYTE] = {
1250
        NULL,
1251
        gen_op_setb_T0_subb,
1252
        gen_op_setz_T0_subb,
1253
        gen_op_setbe_T0_subb,
1254
        gen_op_sets_T0_subb,
1255
        NULL,
1256
        gen_op_setl_T0_subb,
1257
        gen_op_setle_T0_subb,
1258
    },
1259
    [OT_WORD] = {
1260
        NULL,
1261
        gen_op_setb_T0_subw,
1262
        gen_op_setz_T0_subw,
1263
        gen_op_setbe_T0_subw,
1264
        gen_op_sets_T0_subw,
1265
        NULL,
1266
        gen_op_setl_T0_subw,
1267
        gen_op_setle_T0_subw,
1268
    },
1269
    [OT_LONG] = {
1270
        NULL,
1271
        gen_op_setb_T0_subl,
1272
        gen_op_setz_T0_subl,
1273
        gen_op_setbe_T0_subl,
1274
        gen_op_sets_T0_subl,
1275
        NULL,
1276
        gen_op_setl_T0_subl,
1277
        gen_op_setle_T0_subl,
1278
    },
1279
#ifdef TARGET_X86_64
1280
    [OT_QUAD] = {
1281
        NULL,
1282
        gen_op_setb_T0_subq,
1283
        gen_op_setz_T0_subq,
1284
        gen_op_setbe_T0_subq,
1285
        gen_op_sets_T0_subq,
1286
        NULL,
1287
        gen_op_setl_T0_subq,
1288
        gen_op_setle_T0_subq,
1289
    },
1290
#endif
1291
};
1292

    
1293
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
1294
    gen_op_fadd_ST0_FT0,
1295
    gen_op_fmul_ST0_FT0,
1296
    gen_op_fcom_ST0_FT0,
1297
    gen_op_fcom_ST0_FT0,
1298
    gen_op_fsub_ST0_FT0,
1299
    gen_op_fsubr_ST0_FT0,
1300
    gen_op_fdiv_ST0_FT0,
1301
    gen_op_fdivr_ST0_FT0,
1302
};
1303

    
1304
/* NOTE: the "r" (reversed) ops are swapped in this table relative to the ST0_FT0 one */
1305
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
1306
    gen_op_fadd_STN_ST0,
1307
    gen_op_fmul_STN_ST0,
1308
    NULL,
1309
    NULL,
1310
    gen_op_fsubr_STN_ST0,
1311
    gen_op_fsub_STN_ST0,
1312
    gen_op_fdivr_STN_ST0,
1313
    gen_op_fdiv_STN_ST0,
1314
};
1315

    
1316
/* if d == OR_TMP0, it means memory operand (address in A0) */
1317
static void gen_op(DisasContext *s1, int op, int ot, int d)
1318
{
1319
    GenOpFunc *gen_update_cc;
1320
    
1321
    if (d != OR_TMP0) {
1322
        gen_op_mov_TN_reg[ot][0][d]();
1323
    } else {
1324
        gen_op_ld_T0_A0[ot + s1->mem_index]();
1325
    }
1326
    switch(op) {
1327
    case OP_ADCL:
1328
    case OP_SBBL:
1329
        if (s1->cc_op != CC_OP_DYNAMIC)
1330
            gen_op_set_cc_op(s1->cc_op);
1331
        if (d != OR_TMP0) {
1332
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
1333
            gen_op_mov_reg_T0[ot][d]();
1334
        } else {
1335
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
1336
        }
1337
        s1->cc_op = CC_OP_DYNAMIC;
1338
        goto the_end;
1339
    case OP_ADDL:
1340
        gen_op_addl_T0_T1();
1341
        s1->cc_op = CC_OP_ADDB + ot;
1342
        gen_update_cc = gen_op_update2_cc;
1343
        break;
1344
    case OP_SUBL:
1345
        gen_op_subl_T0_T1();
1346
        s1->cc_op = CC_OP_SUBB + ot;
1347
        gen_update_cc = gen_op_update2_cc;
1348
        break;
1349
    default:
1350
    case OP_ANDL:
1351
    case OP_ORL:
1352
    case OP_XORL:
1353
        gen_op_arith_T0_T1_cc[op]();
1354
        s1->cc_op = CC_OP_LOGICB + ot;
1355
        gen_update_cc = gen_op_update1_cc;
1356
        break;
1357
    case OP_CMPL:
1358
        gen_op_cmpl_T0_T1_cc();
1359
        s1->cc_op = CC_OP_SUBB + ot;
1360
        gen_update_cc = NULL;
1361
        break;
1362
    }
1363
    if (op != OP_CMPL) {
1364
        if (d != OR_TMP0)
1365
            gen_op_mov_reg_T0[ot][d]();
1366
        else
1367
            gen_op_st_T0_A0[ot + s1->mem_index]();
1368
    }
1369
    /* the flags update must happen after the memory write (precise
1370
       exception support) */
1371
    if (gen_update_cc)
1372
        gen_update_cc();
1373
 the_end: ;
1374
}
1375

    
1376
/* if d == OR_TMP0, it means memory operand (address in A0) */
1377
static void gen_inc(DisasContext *s1, int ot, int d, int c)
1378
{
1379
    if (d != OR_TMP0)
1380
        gen_op_mov_TN_reg[ot][0][d]();
1381
    else
1382
        gen_op_ld_T0_A0[ot + s1->mem_index]();
1383
    if (s1->cc_op != CC_OP_DYNAMIC)
1384
        gen_op_set_cc_op(s1->cc_op);
1385
    if (c > 0) {
1386
        gen_op_incl_T0();
1387
        s1->cc_op = CC_OP_INCB + ot;
1388
    } else {
1389
        gen_op_decl_T0();
1390
        s1->cc_op = CC_OP_DECB + ot;
1391
    }
1392
    if (d != OR_TMP0)
1393
        gen_op_mov_reg_T0[ot][d]();
1394
    else
1395
        gen_op_st_T0_A0[ot + s1->mem_index]();
1396
    gen_op_update_inc_cc();
1397
}
1398

    
1399
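/* shift or rotate operand 'd' by the count in register 's' (already in T1
   when s == OR_TMP1); flags become dynamic since a zero count leaves them
   unchanged */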
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1400
{
1401
    if (d != OR_TMP0)
1402
        gen_op_mov_TN_reg[ot][0][d]();
1403
    else
1404
        gen_op_ld_T0_A0[ot + s1->mem_index]();
1405
    if (s != OR_TMP1)
1406
        gen_op_mov_TN_reg[ot][1][s]();
1407
    /* for zero counts, flags are not updated, so must do it dynamically */
1408
    if (s1->cc_op != CC_OP_DYNAMIC)
1409
        gen_op_set_cc_op(s1->cc_op);
1410
    
1411
    if (d != OR_TMP0)
1412
        gen_op_shift_T0_T1_cc[ot][op]();
1413
    else
1414
        gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
1415
    if (d != OR_TMP0)
1416
        gen_op_mov_reg_T0[ot][d]();
1417
    s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1418
}
1419

    
1420
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
1421
{
1422
    /* currently not optimized */
1423
    gen_op_movl_T1_im(c);
1424
    gen_shift(s1, op, ot, d, OR_TMP1);
1425
}
1426

    
1427
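/* decode the ModRM/SIB bytes and displacement and compute the effective
   address into A0, adding the segment base when required */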
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1428
{
1429
    target_long disp;
1430
    int havesib;
1431
    int base;
1432
    int index;
1433
    int scale;
1434
    int opreg;
1435
    int mod, rm, code, override, must_add_seg;
1436

    
1437
    override = s->override;
1438
    must_add_seg = s->addseg;
1439
    if (override >= 0)
1440
        must_add_seg = 1;
1441
    mod = (modrm >> 6) & 3;
1442
    rm = modrm & 7;
1443

    
1444
    if (s->aflag) {
1445

    
1446
        havesib = 0;
1447
        base = rm;
1448
        index = 0;
1449
        scale = 0;
1450
        
1451
        if (base == 4) {
1452
            havesib = 1;
1453
            code = ldub_code(s->pc++);
1454
            scale = (code >> 6) & 3;
1455
            index = ((code >> 3) & 7) | REX_X(s);
1456
            base = (code & 7);
1457
        }
1458
        base |= REX_B(s);
1459

    
1460
        switch (mod) {
1461
        case 0:
1462
            if ((base & 7) == 5) {
1463
                base = -1;
1464
                disp = (int32_t)ldl_code(s->pc);
1465
                s->pc += 4;
1466
                if (CODE64(s) && !havesib) {
1467
                    disp += s->pc + s->rip_offset;
1468
                }
1469
            } else {
1470
                disp = 0;
1471
            }
1472
            break;
1473
        case 1:
1474
            disp = (int8_t)ldub_code(s->pc++);
1475
            break;
1476
        default:
1477
        case 2:
1478
            disp = ldl_code(s->pc);
1479
            s->pc += 4;
1480
            break;
1481
        }
1482
        
1483
        if (base >= 0) {
1484
            /* for correct popl handling with esp */
1485
            if (base == 4 && s->popl_esp_hack)
1486
                disp += s->popl_esp_hack;
1487
#ifdef TARGET_X86_64
1488
            if (s->aflag == 2) {
1489
                gen_op_movq_A0_reg[base]();
1490
                if (disp != 0) {
1491
                    if ((int32_t)disp == disp)
1492
                        gen_op_addq_A0_im(disp);
1493
                    else
1494
                        gen_op_addq_A0_im64(disp >> 32, disp);
1495
                }
1496
            } else 
1497
#endif
1498
            {
1499
                gen_op_movl_A0_reg[base]();
1500
                if (disp != 0)
1501
                    gen_op_addl_A0_im(disp);
1502
            }
1503
        } else {
1504
#ifdef TARGET_X86_64
1505
            if (s->aflag == 2) {
1506
                if ((int32_t)disp == disp)
1507
                    gen_op_movq_A0_im(disp);
1508
                else
1509
                    gen_op_movq_A0_im64(disp >> 32, disp);
1510
            } else 
1511
#endif
1512
            {
1513
                gen_op_movl_A0_im(disp);
1514
            }
1515
        }
1516
        /* XXX: index == 4 is always invalid */
1517
        if (havesib && (index != 4 || scale != 0)) {
1518
#ifdef TARGET_X86_64
1519
            if (s->aflag == 2) {
1520
                gen_op_addq_A0_reg_sN[scale][index]();
1521
            } else 
1522
#endif
1523
            {
1524
                gen_op_addl_A0_reg_sN[scale][index]();
1525
            }
1526
        }
1527
        if (must_add_seg) {
1528
            if (override < 0) {
1529
                if (base == R_EBP || base == R_ESP)
1530
                    override = R_SS;
1531
                else
1532
                    override = R_DS;
1533
            }
1534
#ifdef TARGET_X86_64
1535
            if (s->aflag == 2) {
1536
                gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
1537
            } else 
1538
#endif
1539
            {
1540
                gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1541
            }
1542
        }
1543
    } else {
1544
        switch (mod) {
1545
        case 0:
1546
            if (rm == 6) {
1547
                disp = lduw_code(s->pc);
1548
                s->pc += 2;
1549
                gen_op_movl_A0_im(disp);
1550
                rm = 0; /* avoid SS override */
1551
                goto no_rm;
1552
            } else {
1553
                disp = 0;
1554
            }
1555
            break;
1556
        case 1:
1557
            disp = (int8_t)ldub_code(s->pc++);
1558
            break;
1559
        default:
1560
        case 2:
1561
            disp = lduw_code(s->pc);
1562
            s->pc += 2;
1563
            break;
1564
        }
1565
        switch(rm) {
1566
        case 0:
1567
            gen_op_movl_A0_reg[R_EBX]();
1568
            gen_op_addl_A0_reg_sN[0][R_ESI]();
1569
            break;
1570
        case 1:
1571
            gen_op_movl_A0_reg[R_EBX]();
1572
            gen_op_addl_A0_reg_sN[0][R_EDI]();
1573
            break;
1574
        case 2:
1575
            gen_op_movl_A0_reg[R_EBP]();
1576
            gen_op_addl_A0_reg_sN[0][R_ESI]();
1577
            break;
1578
        case 3:
1579
            gen_op_movl_A0_reg[R_EBP]();
1580
            gen_op_addl_A0_reg_sN[0][R_EDI]();
1581
            break;
1582
        case 4:
1583
            gen_op_movl_A0_reg[R_ESI]();
1584
            break;
1585
        case 5:
1586
            gen_op_movl_A0_reg[R_EDI]();
1587
            break;
1588
        case 6:
1589
            gen_op_movl_A0_reg[R_EBP]();
1590
            break;
1591
        default:
1592
        case 7:
1593
            gen_op_movl_A0_reg[R_EBX]();
1594
            break;
1595
        }
1596
        if (disp != 0)
1597
            gen_op_addl_A0_im(disp);
1598
        gen_op_andl_A0_ffff();
1599
    no_rm:
1600
        if (must_add_seg) {
1601
            if (override < 0) {
1602
                if (rm == 2 || rm == 3 || rm == 6)
1603
                    override = R_SS;
1604
                else
1605
                    override = R_DS;
1606
            }
1607
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1608
        }
1609
    }
1610

    
1611
    opreg = OR_A0;
1612
    disp = 0;
1613
    *reg_ptr = opreg;
1614
    *offset_ptr = disp;
1615
}
1616

    
1617
/* used for LEA and MOV AX, mem */
1618
static void gen_add_A0_ds_seg(DisasContext *s)
1619
{
1620
    int override, must_add_seg;
1621
    must_add_seg = s->addseg;
1622
    override = R_DS;
1623
    if (s->override >= 0) {
1624
        override = s->override;
1625
        must_add_seg = 1;
1626
    } else {
1627
        override = R_DS;
1628
    }
1629
    if (must_add_seg) {
1630
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1631
    }
1632
}
1633

    
1634
/* generate modrm memory load or store of 'reg'; the value is taken from
1635
   or left in T0 when reg == OR_TMP0 */
1636
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1637
{
1638
    int mod, rm, opreg, disp;
1639

    
1640
    mod = (modrm >> 6) & 3;
1641
    rm = (modrm & 7) | REX_B(s);
1642
    if (mod == 3) {
1643
        if (is_store) {
1644
            if (reg != OR_TMP0)
1645
                gen_op_mov_TN_reg[ot][0][reg]();
1646
            gen_op_mov_reg_T0[ot][rm]();
1647
        } else {
1648
            gen_op_mov_TN_reg[ot][0][rm]();
1649
            if (reg != OR_TMP0)
1650
                gen_op_mov_reg_T0[ot][reg]();
1651
        }
1652
    } else {
1653
        gen_lea_modrm(s, modrm, &opreg, &disp);
1654
        if (is_store) {
1655
            if (reg != OR_TMP0)
1656
                gen_op_mov_TN_reg[ot][0][reg]();
1657
            gen_op_st_T0_A0[ot + s->mem_index]();
1658
        } else {
1659
            gen_op_ld_T0_A0[ot + s->mem_index]();
1660
            if (reg != OR_TMP0)
1661
                gen_op_mov_reg_T0[ot][reg]();
1662
        }
1663
    }
1664
}
1665

    
1666
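/* fetch an immediate operand of size 'ot' from the code stream and advance s->pc */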
static inline uint32_t insn_get(DisasContext *s, int ot)
1667
{
1668
    uint32_t ret;
1669

    
1670
    switch(ot) {
1671
    case OT_BYTE:
1672
        ret = ldub_code(s->pc);
1673
        s->pc++;
1674
        break;
1675
    case OT_WORD:
1676
        ret = lduw_code(s->pc);
1677
        s->pc += 2;
1678
        break;
1679
    default:
1680
    case OT_LONG:
1681
        ret = ldl_code(s->pc);
1682
        s->pc += 4;
1683
        break;
1684
    }
1685
    return ret;
1686
}
1687

    
1688
static inline int insn_const_size(unsigned int ot)
1689
{
1690
    if (ot <= OT_LONG)
1691
        return 1 << ot;
1692
    else
1693
        return 4;
1694
}
1695

    
1696
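/* generate a conditional jump: to 'val' if condition 'b' is true, otherwise
   to 'next_eip'; uses direct block chaining when jmp_opt is set */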
static inline void gen_jcc(DisasContext *s, int b, 
1697
                           target_ulong val, target_ulong next_eip)
1698
{
1699
    TranslationBlock *tb;
1700
    int inv, jcc_op;
1701
    GenOpFunc1 *func;
1702
    target_ulong tmp;
1703
    int l1, l2;
1704

    
1705
    inv = b & 1;
1706
    jcc_op = (b >> 1) & 7;
1707
    
1708
    if (s->jmp_opt) {
1709
        switch(s->cc_op) {
1710
            /* we optimize the cmp/jcc case */
1711
        case CC_OP_SUBB:
1712
        case CC_OP_SUBW:
1713
        case CC_OP_SUBL:
1714
        case CC_OP_SUBQ:
1715
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1716
            break;
1717
            
1718
            /* some jumps are easy to compute */
1719
        case CC_OP_ADDB:
1720
        case CC_OP_ADDW:
1721
        case CC_OP_ADDL:
1722
        case CC_OP_ADDQ:
1723

    
1724
        case CC_OP_ADCB:
1725
        case CC_OP_ADCW:
1726
        case CC_OP_ADCL:
1727
        case CC_OP_ADCQ:
1728

    
1729
        case CC_OP_SBBB:
1730
        case CC_OP_SBBW:
1731
        case CC_OP_SBBL:
1732
        case CC_OP_SBBQ:
1733

    
1734
        case CC_OP_LOGICB:
1735
        case CC_OP_LOGICW:
1736
        case CC_OP_LOGICL:
1737
        case CC_OP_LOGICQ:
1738

    
1739
        case CC_OP_INCB:
1740
        case CC_OP_INCW:
1741
        case CC_OP_INCL:
1742
        case CC_OP_INCQ:
1743

    
1744
        case CC_OP_DECB:
1745
        case CC_OP_DECW:
1746
        case CC_OP_DECL:
1747
        case CC_OP_DECQ:
1748

    
1749
        case CC_OP_SHLB:
1750
        case CC_OP_SHLW:
1751
        case CC_OP_SHLL:
1752
        case CC_OP_SHLQ:
1753

    
1754
        case CC_OP_SARB:
1755
        case CC_OP_SARW:
1756
        case CC_OP_SARL:
1757
        case CC_OP_SARQ:
1758
            switch(jcc_op) {
1759
            case JCC_Z:
1760
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1761
                break;
1762
            case JCC_S:
1763
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1764
                break;
1765
            default:
1766
                func = NULL;
1767
                break;
1768
            }
1769
            break;
1770
        default:
1771
            func = NULL;
1772
            break;
1773
        }
1774

    
1775
        if (s->cc_op != CC_OP_DYNAMIC)
1776
            gen_op_set_cc_op(s->cc_op);
1777

    
1778
        if (!func) {
1779
            gen_setcc_slow[jcc_op]();
1780
            func = gen_op_jnz_T0_label;
1781
        }
1782
    
1783
        if (inv) {
1784
            tmp = val;
1785
            val = next_eip;
1786
            next_eip = tmp;
1787
        }
1788
        tb = s->tb;
1789

    
1790
        l1 = gen_new_label();
1791
        func(l1);
1792

    
1793
        gen_op_goto_tb0(TBPARAM(tb));
1794
        gen_jmp_im(next_eip);
1795
        gen_op_movl_T0_im((long)tb + 0);
1796
        gen_op_exit_tb();
1797

    
1798
        gen_set_label(l1);
1799
        gen_op_goto_tb1(TBPARAM(tb));
1800
        gen_jmp_im(val);
1801
        gen_op_movl_T0_im((long)tb + 1);
1802
        gen_op_exit_tb();
1803

    
1804
        s->is_jmp = 3;
1805
    } else {
1806

    
1807
        if (s->cc_op != CC_OP_DYNAMIC) {
1808
            gen_op_set_cc_op(s->cc_op);
1809
            s->cc_op = CC_OP_DYNAMIC;
1810
        }
1811
        gen_setcc_slow[jcc_op]();
1812
        if (inv) {
1813
            tmp = val;
1814
            val = next_eip;
1815
            next_eip = tmp;
1816
        }
1817
        l1 = gen_new_label();
1818
        l2 = gen_new_label();
1819
        gen_op_jnz_T0_label(l1);
1820
        gen_jmp_im(next_eip);
1821
        gen_op_jmp_label(l2);
1822
        gen_set_label(l1);
1823
        gen_jmp_im(val);
1824
        gen_set_label(l2);
1825
        gen_eob(s);
1826
    }
1827
}
1828

    
1829
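/* compute condition 'b' into T0 (0 or 1), using the cc_op specialized
   helpers when possible */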
static void gen_setcc(DisasContext *s, int b)
1830
{
1831
    int inv, jcc_op;
1832
    GenOpFunc *func;
1833

    
1834
    inv = b & 1;
1835
    jcc_op = (b >> 1) & 7;
1836
    switch(s->cc_op) {
1837
        /* we optimize the cmp/jcc case */
1838
    case CC_OP_SUBB:
1839
    case CC_OP_SUBW:
1840
    case CC_OP_SUBL:
1841
    case CC_OP_SUBQ:
1842
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1843
        if (!func)
1844
            goto slow_jcc;
1845
        break;
1846
        
1847
        /* some jumps are easy to compute */
1848
    case CC_OP_ADDB:
1849
    case CC_OP_ADDW:
1850
    case CC_OP_ADDL:
1851
    case CC_OP_ADDQ:
1852

    
1853
    case CC_OP_LOGICB:
1854
    case CC_OP_LOGICW:
1855
    case CC_OP_LOGICL:
1856
    case CC_OP_LOGICQ:
1857

    
1858
    case CC_OP_INCB:
1859
    case CC_OP_INCW:
1860
    case CC_OP_INCL:
1861
    case CC_OP_INCQ:
1862

    
1863
    case CC_OP_DECB:
1864
    case CC_OP_DECW:
1865
    case CC_OP_DECL:
1866
    case CC_OP_DECQ:
1867

    
1868
    case CC_OP_SHLB:
1869
    case CC_OP_SHLW:
1870
    case CC_OP_SHLL:
1871
    case CC_OP_SHLQ:
1872
        switch(jcc_op) {
1873
        case JCC_Z:
1874
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1875
            break;
1876
        case JCC_S:
1877
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1878
            break;
1879
        default:
1880
            goto slow_jcc;
1881
        }
1882
        break;
1883
    default:
1884
    slow_jcc:
1885
        if (s->cc_op != CC_OP_DYNAMIC)
1886
            gen_op_set_cc_op(s->cc_op);
1887
        func = gen_setcc_slow[jcc_op];
1888
        break;
1889
    }
1890
    func();
1891
    if (inv) {
1892
        gen_op_xor_T0_1();
1893
    }
1894
}
1895

    
1896
/* move T0 to seg_reg and determine whether the CPU state may change. Never
1897
   call this function with seg_reg == R_CS */
1898
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
1899
{
1900
    if (s->pe && !s->vm86) {
1901
        /* XXX: optimize by finding processor state dynamically */
1902
        if (s->cc_op != CC_OP_DYNAMIC)
1903
            gen_op_set_cc_op(s->cc_op);
1904
        gen_jmp_im(cur_eip);
1905
        gen_op_movl_seg_T0(seg_reg);
1906
        /* abort translation because the addseg value may change or
1907
           because ss32 may change. For R_SS, translation must always
1908
           stop as a special handling must be done to disable hardware
1909
           interrupts for the next instruction */
1910
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
1911
            s->is_jmp = 3;
1912
    } else {
1913
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
1914
        if (seg_reg == R_SS)
1915
            s->is_jmp = 3;
1916
    }
1917
}
1918

    
1919
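/* adjust ESP/RSP by a constant, using the stack width implied by
   CODE64/ss32 */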
static inline void gen_stack_update(DisasContext *s, int addend)
1920
{
1921
#ifdef TARGET_X86_64
1922
    if (CODE64(s)) {
1923
        if (addend == 8)
1924
            gen_op_addq_ESP_8();
1925
        else 
1926
            gen_op_addq_ESP_im(addend);
1927
    } else
1928
#endif
1929
    if (s->ss32) {
1930
        if (addend == 2)
1931
            gen_op_addl_ESP_2();
1932
        else if (addend == 4)
1933
            gen_op_addl_ESP_4();
1934
        else 
1935
            gen_op_addl_ESP_im(addend);
1936
    } else {
1937
        if (addend == 2)
1938
            gen_op_addw_ESP_2();
1939
        else if (addend == 4)
1940
            gen_op_addw_ESP_4();
1941
        else
1942
            gen_op_addw_ESP_im(addend);
1943
    }
1944
}
1945

    
1946
/* generate a push. It depends on ss32, addseg and dflag */
1947
static void gen_push_T0(DisasContext *s)
1948
{
1949
#ifdef TARGET_X86_64
1950
    if (CODE64(s)) {
1951
        /* XXX: check 16 bit behaviour */
1952
        gen_op_movq_A0_reg[R_ESP]();
1953
        gen_op_subq_A0_8();
1954
        gen_op_st_T0_A0[OT_QUAD + s->mem_index]();
1955
        gen_op_movq_ESP_A0();
1956
    } else 
1957
#endif
1958
    {
1959
        gen_op_movl_A0_reg[R_ESP]();
1960
        if (!s->dflag)
1961
            gen_op_subl_A0_2();
1962
        else
1963
            gen_op_subl_A0_4();
1964
        if (s->ss32) {
1965
            if (s->addseg) {
1966
                gen_op_movl_T1_A0();
1967
                gen_op_addl_A0_SS();
1968
            }
1969
        } else {
1970
            gen_op_andl_A0_ffff();
1971
            gen_op_movl_T1_A0();
1972
            gen_op_addl_A0_SS();
1973
        }
1974
        gen_op_st_T0_A0[s->dflag + 1 + s->mem_index]();
1975
        if (s->ss32 && !s->addseg)
1976
            gen_op_movl_ESP_A0();
1977
        else
1978
            gen_op_mov_reg_T1[s->ss32 + 1][R_ESP]();
1979
    }
1980
}
1981

    
1982
/* generate a push. It depends on ss32, addseg and dflag */
1983
/* slower version for T1, only used for call Ev */
1984
static void gen_push_T1(DisasContext *s)
1985
{
1986
#ifdef TARGET_X86_64
1987
    if (CODE64(s)) {
1988
        /* XXX: check 16 bit behaviour */
1989
        gen_op_movq_A0_reg[R_ESP]();
1990
        gen_op_subq_A0_8();
1991
        gen_op_st_T1_A0[OT_QUAD + s->mem_index]();
1992
        gen_op_movq_ESP_A0();
1993
    } else 
1994
#endif
1995
    {
1996
        gen_op_movl_A0_reg[R_ESP]();
1997
        if (!s->dflag)
1998
            gen_op_subl_A0_2();
1999
        else
2000
            gen_op_subl_A0_4();
2001
        if (s->ss32) {
2002
            if (s->addseg) {
2003
                gen_op_addl_A0_SS();
2004
            }
2005
        } else {
2006
            gen_op_andl_A0_ffff();
2007
            gen_op_addl_A0_SS();
2008
        }
2009
        gen_op_st_T1_A0[s->dflag + 1 + s->mem_index]();
2010
        
2011
        if (s->ss32 && !s->addseg)
2012
            gen_op_movl_ESP_A0();
2013
        else
2014
            gen_stack_update(s, (-2) << s->dflag);
2015
    }
2016
}
2017

    
2018
/* two step pop is necessary for precise exceptions */
2019
static void gen_pop_T0(DisasContext *s)
2020
{
2021
#ifdef TARGET_X86_64
2022
    if (CODE64(s)) {
2023
        /* XXX: check 16 bit behaviour */
2024
        gen_op_movq_A0_reg[R_ESP]();
2025
        gen_op_ld_T0_A0[OT_QUAD + s->mem_index]();
2026
    } else 
2027
#endif
2028
    {
2029
        gen_op_movl_A0_reg[R_ESP]();
2030
        if (s->ss32) {
2031
            if (s->addseg)
2032
                gen_op_addl_A0_SS();
2033
        } else {
2034
            gen_op_andl_A0_ffff();
2035
            gen_op_addl_A0_SS();
2036
        }
2037
        gen_op_ld_T0_A0[s->dflag + 1 + s->mem_index]();
2038
    }
2039
}
2040

    
2041
static void gen_pop_update(DisasContext *s)
2042
{
2043
#ifdef TARGET_X86_64
2044
    if (CODE64(s)) {
2045
        gen_stack_update(s, 8);
2046
    } else
2047
#endif
2048
    {
2049
        gen_stack_update(s, 2 << s->dflag);
2050
    }
2051
}
2052

    
2053
static void gen_stack_A0(DisasContext *s)
2054
{
2055
    gen_op_movl_A0_ESP();
2056
    if (!s->ss32)
2057
        gen_op_andl_A0_ffff();
2058
    gen_op_movl_T1_A0();
2059
    if (s->addseg)
2060
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
2061
}
2062

    
2063
/* NOTE: wrap around in 16 bit not fully handled */
2064
static void gen_pusha(DisasContext *s)
2065
{
2066
    int i;
2067
    gen_op_movl_A0_ESP();
2068
    gen_op_addl_A0_im(-16 <<  s->dflag);
2069
    if (!s->ss32)
2070
        gen_op_andl_A0_ffff();
2071
    gen_op_movl_T1_A0();
2072
    if (s->addseg)
2073
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
2074
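    /* store EDI..EAX (reg 7 - i) from the lowest address upwards,
       matching the PUSHA stack image */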
    for(i = 0;i < 8; i++) {
2075
        gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
2076
        gen_op_st_T0_A0[OT_WORD + s->dflag + s->mem_index]();
2077
        gen_op_addl_A0_im(2 <<  s->dflag);
2078
    }
2079
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
2080
}
2081

    
2082
/* NOTE: wrap around in 16 bit not fully handled */
2083
static void gen_popa(DisasContext *s)
2084
{
2085
    int i;
2086
    gen_op_movl_A0_ESP();
2087
    if (!s->ss32)
2088
        gen_op_andl_A0_ffff();
2089
    gen_op_movl_T1_A0();
2090
    gen_op_addl_T1_im(16 <<  s->dflag);
2091
    if (s->addseg)
2092
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
2093
    for(i = 0;i < 8; i++) {
2094
        /* ESP is not reloaded */
2095
        if (i != 3) {
2096
            gen_op_ld_T0_A0[OT_WORD + s->dflag + s->mem_index]();
2097
            gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
2098
        }
2099
        gen_op_addl_A0_im(2 <<  s->dflag);
2100
    }
2101
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
2102
}
2103

    
2104
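/* ENTER: push EBP, let the enter_level helper copy the nested frame
   pointers, point EBP at the new frame and lower ESP past the locals */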
static void gen_enter(DisasContext *s, int esp_addend, int level)
2105
{
2106
    int ot, opsize;
2107

    
2108
    ot = s->dflag + OT_WORD;
2109
    level &= 0x1f;
2110
    opsize = 2 << s->dflag;
2111

    
2112
    gen_op_movl_A0_ESP();
2113
    gen_op_addl_A0_im(-opsize);
2114
    if (!s->ss32)
2115
        gen_op_andl_A0_ffff();
2116
    gen_op_movl_T1_A0();
2117
    if (s->addseg)
2118
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
2119
    /* push bp */
2120
    gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
2121
    gen_op_st_T0_A0[ot + s->mem_index]();
2122
    if (level) {
2123
        gen_op_enter_level(level, s->dflag);
2124
    }
2125
    gen_op_mov_reg_T1[ot][R_EBP]();
2126
    gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
2127
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
2128
}

static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;
}

/* an interrupt is different from an exception because of the
   privilege checks */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3;
}

static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;
}
2159

    
2160
/* generate a generic end of block. Trace exception is also generated
2161
   if needed */
2162
static void gen_eob(DisasContext *s)
2163
{
2164
    if (s->cc_op != CC_OP_DYNAMIC)
2165
        gen_op_set_cc_op(s->cc_op);
2166
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
2167
        gen_op_reset_inhibit_irq();
2168
    }
2169
    if (s->singlestep_enabled) {
2170
        gen_op_debug();
2171
    } else if (s->tf) {
2172
        gen_op_raise_exception(EXCP01_SSTP);
2173
    } else {
2174
        gen_op_movl_T0_0();
2175
        gen_op_exit_tb();
2176
    }
2177
    s->is_jmp = 3;
2178
}
2179

    
2180
/* generate a jump to eip. No segment change must happen before as a
2181
   direct call to the next block may occur */
2182
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2183
{
2184
    TranslationBlock *tb = s->tb;
2185

    
2186
    if (s->jmp_opt) {
2187
        if (s->cc_op != CC_OP_DYNAMIC)
2188
            gen_op_set_cc_op(s->cc_op);
2189
        if (tb_num)
2190
            gen_op_goto_tb1(TBPARAM(tb));
2191
        else
2192
            gen_op_goto_tb0(TBPARAM(tb));
2193
        gen_jmp_im(eip);
2194
        gen_op_movl_T0_im((long)tb + tb_num);
2195
        gen_op_exit_tb();
2196
        s->is_jmp = 3;
2197
    } else {
2198
        gen_jmp_im(eip);
2199
        gen_eob(s);
2200
    }
2201
}

static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2207

    
2208
static void gen_movtl_T0_im(target_ulong val)
2209
{
2210
#ifdef TARGET_X86_64    
2211
    if ((int32_t)val == val) {
2212
        gen_op_movl_T0_im(val);
2213
    } else {
2214
        gen_op_movq_T0_im64(val >> 32, val);
2215
    }
2216
#else
2217
    gen_op_movl_T0_im(val);
2218
#endif
2219
}
2220

    
2221
static void gen_movtl_T1_im(target_ulong val)
2222
{
2223
#ifdef TARGET_X86_64    
2224
    if ((int32_t)val == val) {
2225
        gen_op_movl_T1_im(val);
2226
    } else {
2227
        gen_op_movq_T1_im64(val >> 32, val);
2228
    }
2229
#else
2230
    gen_op_movl_T1_im(val);
2231
#endif
2232
}

static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
2243

    
2244
static GenOpFunc1 *gen_ldq_env_A0[3] = {
2245
    gen_op_ldq_raw_env_A0,
2246
#ifndef CONFIG_USER_ONLY
2247
    gen_op_ldq_kernel_env_A0,
2248
    gen_op_ldq_user_env_A0,
2249
#endif
2250
};
2251

    
2252
static GenOpFunc1 *gen_stq_env_A0[3] = {
2253
    gen_op_stq_raw_env_A0,
2254
#ifndef CONFIG_USER_ONLY
2255
    gen_op_stq_kernel_env_A0,
2256
    gen_op_stq_user_env_A0,
2257
#endif
2258
};
2259

    
2260
static GenOpFunc1 *gen_ldo_env_A0[3] = {
2261
    gen_op_ldo_raw_env_A0,
2262
#ifndef CONFIG_USER_ONLY
2263
    gen_op_ldo_kernel_env_A0,
2264
    gen_op_ldo_user_env_A0,
2265
#endif
2266
};
2267

    
2268
static GenOpFunc1 *gen_sto_env_A0[3] = {
2269
    gen_op_sto_raw_env_A0,
2270
#ifndef CONFIG_USER_ONLY
2271
    gen_op_sto_kernel_env_A0,
2272
    gen_op_sto_user_env_A0,
2273
#endif
2274
};
2275

    
2276
#define SSE_SPECIAL ((GenOpFunc2 *)1)
2277

    
2278
#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
2279
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
2280
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
2281

    
2282
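/* the opcode byte indexes the row; the column is selected by the mandatory
   prefix: 0 = none, 1 = 0x66, 2 = 0xF3, 3 = 0xF2 (the b1 value in gen_sse) */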
static GenOpFunc2 *sse_op_table1[256][4] = {
2283
    /* pure SSE operations */
2284
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2285
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2286
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
2287
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
2288
    [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
2289
    [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
2290
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
2291
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */
2292

    
2293
    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
2294
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
2295
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2296
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
2297
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttss2si, cvttsd2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtss2si, cvtsd2si */
2299
    [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
2300
    [0x2f] = { gen_op_comiss, gen_op_comisd },
2301
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2302
    [0x51] = SSE_FOP(sqrt),
2303
    [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
2304
    [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
2305
    [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
2306
    [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
2307
    [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
2308
    [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
2309
    [0x58] = SSE_FOP(add),
2310
    [0x59] = SSE_FOP(mul),
2311
    [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps, 
2312
               gen_op_cvtss2sd, gen_op_cvtsd2ss },
2313
    [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
2314
    [0x5c] = SSE_FOP(sub),
2315
    [0x5d] = SSE_FOP(min),
2316
    [0x5e] = SSE_FOP(div),
2317
    [0x5f] = SSE_FOP(max),
2318

    
2319
    [0xc2] = SSE_FOP(cmpeq),
2320
    [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },
2321

    
2322
    /* MMX ops and their SSE extensions */
2323
    [0x60] = MMX_OP2(punpcklbw),
2324
    [0x61] = MMX_OP2(punpcklwd),
2325
    [0x62] = MMX_OP2(punpckldq),
2326
    [0x63] = MMX_OP2(packsswb),
2327
    [0x64] = MMX_OP2(pcmpgtb),
2328
    [0x65] = MMX_OP2(pcmpgtw),
2329
    [0x66] = MMX_OP2(pcmpgtl),
2330
    [0x67] = MMX_OP2(packuswb),
2331
    [0x68] = MMX_OP2(punpckhbw),
2332
    [0x69] = MMX_OP2(punpckhwd),
2333
    [0x6a] = MMX_OP2(punpckhdq),
2334
    [0x6b] = MMX_OP2(packssdw),
2335
    [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
2336
    [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
2337
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2338
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2339
    [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx, 
2340
               (GenOpFunc2 *)gen_op_pshufd_xmm, 
2341
               (GenOpFunc2 *)gen_op_pshufhw_xmm, 
2342
               (GenOpFunc2 *)gen_op_pshuflw_xmm },
2343
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2344
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2345
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2346
    [0x74] = MMX_OP2(pcmpeqb),
2347
    [0x75] = MMX_OP2(pcmpeqw),
2348
    [0x76] = MMX_OP2(pcmpeql),
2349
    [0x77] = { SSE_SPECIAL }, /* emms */
2350
    [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
2351
    [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
2352
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
2353
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2354
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
2355
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
2356
    [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
2357
    [0xd1] = MMX_OP2(psrlw),
2358
    [0xd2] = MMX_OP2(psrld),
2359
    [0xd3] = MMX_OP2(psrlq),
2360
    [0xd4] = MMX_OP2(paddq),
2361
    [0xd5] = MMX_OP2(pmullw),
2362
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
2363
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
2364
    [0xd8] = MMX_OP2(psubusb),
2365
    [0xd9] = MMX_OP2(psubusw),
2366
    [0xda] = MMX_OP2(pminub),
2367
    [0xdb] = MMX_OP2(pand),
2368
    [0xdc] = MMX_OP2(paddusb),
2369
    [0xdd] = MMX_OP2(paddusw),
2370
    [0xde] = MMX_OP2(pmaxub),
2371
    [0xdf] = MMX_OP2(pandn),
2372
    [0xe0] = MMX_OP2(pavgb),
2373
    [0xe1] = MMX_OP2(psraw),
2374
    [0xe2] = MMX_OP2(psrad),
2375
    [0xe3] = MMX_OP2(pavgw),
2376
    [0xe4] = MMX_OP2(pmulhuw),
2377
    [0xe5] = MMX_OP2(pmulhw),
2378
    [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
2379
    [0xe7] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntq, movntdq */
2380
    [0xe8] = MMX_OP2(psubsb),
2381
    [0xe9] = MMX_OP2(psubsw),
2382
    [0xea] = MMX_OP2(pminsw),
2383
    [0xeb] = MMX_OP2(por),
2384
    [0xec] = MMX_OP2(paddsb),
2385
    [0xed] = MMX_OP2(paddsw),
2386
    [0xee] = MMX_OP2(pmaxsw),
2387
    [0xef] = MMX_OP2(pxor),
2388
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu (PNI) */
2389
    [0xf1] = MMX_OP2(psllw),
2390
    [0xf2] = MMX_OP2(pslld),
2391
    [0xf3] = MMX_OP2(psllq),
2392
    [0xf4] = MMX_OP2(pmuludq),
2393
    [0xf5] = MMX_OP2(pmaddwd),
2394
    [0xf6] = MMX_OP2(psadbw),
2395
    [0xf7] = MMX_OP2(maskmov),
2396
    [0xf8] = MMX_OP2(psubb),
2397
    [0xf9] = MMX_OP2(psubw),
2398
    [0xfa] = MMX_OP2(psubl),
2399
    [0xfb] = MMX_OP2(psubq),
2400
    [0xfc] = MMX_OP2(paddb),
2401
    [0xfd] = MMX_OP2(paddw),
2402
    [0xfe] = MMX_OP2(paddl),
2403
};
2404

    
2405
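/* immediate-form shift group (0F 71/72/73): row = 8 * size (w, d, q) + the
   modrm reg field; the column selects the MMX or XMM flavour */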
static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
2406
    [0 + 2] = MMX_OP2(psrlw),
2407
    [0 + 4] = MMX_OP2(psraw),
2408
    [0 + 6] = MMX_OP2(psllw),
2409
    [8 + 2] = MMX_OP2(psrld),
2410
    [8 + 4] = MMX_OP2(psrad),
2411
    [8 + 6] = MMX_OP2(pslld),
2412
    [16 + 2] = MMX_OP2(psrlq),
2413
    [16 + 3] = { NULL, gen_op_psrldq_xmm },
2414
    [16 + 6] = MMX_OP2(psllq),
2415
    [16 + 7] = { NULL, gen_op_pslldq_xmm },
2416
};
2417

    
2418
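/* scalar int<->float conversions, three groups of four entries:
   cvtsi2ss/sd, cvttss/sd2si and cvtss/sd2si; within a group the index is
   2 * (64-bit integer operand) + (sd form) */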
static GenOpFunc1 *sse_op_table3[4 * 3] = {
2419
    gen_op_cvtsi2ss,
2420
    gen_op_cvtsi2sd,
2421
    X86_64_ONLY(gen_op_cvtsq2ss),
2422
    X86_64_ONLY(gen_op_cvtsq2sd),
2423
    
2424
    gen_op_cvttss2si,
2425
    gen_op_cvttsd2si,
2426
    X86_64_ONLY(gen_op_cvttss2sq),
2427
    X86_64_ONLY(gen_op_cvttsd2sq),
2428

    
2429
    gen_op_cvtss2si,
2430
    gen_op_cvtsd2si,
2431
    X86_64_ONLY(gen_op_cvtss2sq),
2432
    X86_64_ONLY(gen_op_cvtsd2sq),
2433
};
2434
    
2435
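/* CMPPS/CMPPD/CMPSS/CMPSD: the imm8 predicate (0-7) selects the row, the
   mandatory prefix selects the column as in sse_op_table1 */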
static GenOpFunc2 *sse_op_table4[8][4] = {
2436
    SSE_FOP(cmpeq),
2437
    SSE_FOP(cmplt),
2438
    SSE_FOP(cmple),
2439
    SSE_FOP(cmpunord),
2440
    SSE_FOP(cmpneq),
2441
    SSE_FOP(cmpnlt),
2442
    SSE_FOP(cmpnle),
2443
    SSE_FOP(cmpord),
2444
};
2445
    
2446
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2447
{
2448
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
2449
    int modrm, mod, rm, reg, reg_addr, offset_addr;
2450
    GenOpFunc2 *sse_op2;
2451
    GenOpFunc3 *sse_op3;
2452

    
2453
    b &= 0xff;
2454
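    /* select the sse_op_table1 column from the mandatory prefix:
       none, 0x66, 0xF3 or 0xF2 */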
    if (s->prefix & PREFIX_DATA) 
2455
        b1 = 1;
2456
    else if (s->prefix & PREFIX_REPZ) 
2457
        b1 = 2;
2458
    else if (s->prefix & PREFIX_REPNZ) 
2459
        b1 = 3;
2460
    else
2461
        b1 = 0;
2462
    sse_op2 = sse_op_table1[b][b1];
2463
    if (!sse_op2) 
2464
        goto illegal_op;
2465
    if (b <= 0x5f || b == 0xc6 || b == 0xc2) {
2466
        is_xmm = 1;
2467
    } else {
2468
        if (b1 == 0) {
2469
            /* MMX case */
2470
            is_xmm = 0;
2471
        } else {
2472
            is_xmm = 1;
2473
        }
2474
    }
2475
    /* simple MMX/SSE operation */
2476
    if (s->flags & HF_TS_MASK) {
2477
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2478
        return;
2479
    }
2480
    if (s->flags & HF_EM_MASK) {
2481
    illegal_op:
2482
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2483
        return;
2484
    }
2485
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2486
        goto illegal_op;
2487
    if (b == 0x77) {
2488
        /* emms */
2489
        gen_op_emms();
2490
        return;
2491
    }
2492
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2493
       the static cpu state) */
2494
    if (!is_xmm) {
2495
        gen_op_enter_mmx();
2496
    }
2497

    
2498
    modrm = ldub_code(s->pc++);
2499
    reg = ((modrm >> 3) & 7);
2500
    if (is_xmm)
2501
        reg |= rex_r;
2502
    mod = (modrm >> 6) & 3;
2503
    if (sse_op2 == SSE_SPECIAL) {
2504
        b |= (b1 << 8);
2505
        switch(b) {
2506
        case 0x0e7: /* movntq */
2507
            if (mod == 3) 
2508
                goto illegal_op;
2509
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2510
            gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2511
            break;
2512
        case 0x1e7: /* movntdq */
2513
        case 0x02b: /* movntps */
2514
        case 0x12b: /* movntpd */
2515
        case 0x2f0: /* lddqu */
2516
            if (mod == 3) 
2517
                goto illegal_op;
2518
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2519
            gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2520
            break;
2521
        case 0x6e: /* movd mm, ea */
2522
            gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2523
            gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2524
            break;
2525
        case 0x16e: /* movd xmm, ea */
2526
            gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2527
            gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2528
            break;
2529
        case 0x6f: /* movq mm, ea */
2530
            if (mod != 3) {
2531
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2532
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2533
            } else {
2534
                rm = (modrm & 7);
2535
                gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
2536
                            offsetof(CPUX86State,fpregs[rm].mmx));
2537
            }
2538
            break;
2539
        case 0x010: /* movups */
2540
        case 0x110: /* movupd */
2541
        case 0x028: /* movaps */
2542
        case 0x128: /* movapd */
2543
        case 0x16f: /* movdqa xmm, ea */
2544
        case 0x26f: /* movdqu xmm, ea */
2545
            if (mod != 3) {
2546
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2547
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2548
            } else {
2549
                rm = (modrm & 7) | REX_B(s);
2550
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2551
                            offsetof(CPUX86State,xmm_regs[rm]));
2552
            }
2553
            break;
2554
        case 0x210: /* movss xmm, ea */
2555
            if (mod != 3) {
2556
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2557
                gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2558
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2559
                gen_op_movl_T0_0();
2560
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2561
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2562
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2563
            } else {
2564
                rm = (modrm & 7) | REX_B(s);
2565
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2566
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2567
            }
2568
            break;
2569
        case 0x310: /* movsd xmm, ea */
2570
            if (mod != 3) {
2571
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2572
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2573
                gen_op_movl_T0_0();
2574
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2575
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2576
            } else {
2577
                rm = (modrm & 7) | REX_B(s);
2578
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2579
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2580
            }
2581
            break;
2582
        case 0x012: /* movlps */
2583
        case 0x112: /* movlpd */
2584
            if (mod != 3) {
2585
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2586
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2587
            } else {
2588
                /* movhlps */
2589
                rm = (modrm & 7) | REX_B(s);
2590
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2591
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2592
            }
2593
            break;
2594
        case 0x016: /* movhps */
2595
        case 0x116: /* movhpd */
2596
            if (mod != 3) {
2597
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2598
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2599
            } else {
2600
                /* movlhps */
2601
                rm = (modrm & 7) | REX_B(s);
2602
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2603
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2604
            }
2605
            break;
2606
        case 0x216: /* movshdup */
2607
            if (mod != 3) {
2608
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2609
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2610
            } else {
2611
                rm = (modrm & 7) | REX_B(s);
2612
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2613
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
2614
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2615
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
2616
            }
2617
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2618
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2619
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2620
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2621
            break;
2622
        case 0x7e: /* movd ea, mm */
2623
            gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2624
            gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2625
            break;
2626
        case 0x17e: /* movd ea, xmm */
2627
            gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2628
            gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2629
            break;
2630
        case 0x27e: /* movq xmm, ea */
2631
            if (mod != 3) {
2632
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2633
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2634
            } else {
2635
                rm = (modrm & 7) | REX_B(s);
2636
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2637
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2638
            }
2639
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2640
            break;
2641
        case 0x7f: /* movq ea, mm */
2642
            if (mod != 3) {
2643
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2644
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2645
            } else {
2646
                rm = (modrm & 7);
2647
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2648
                            offsetof(CPUX86State,fpregs[reg].mmx));
2649
            }
2650
            break;
2651
        case 0x011: /* movups */
2652
        case 0x111: /* movupd */
2653
        case 0x029: /* movaps */
2654
        case 0x129: /* movapd */
2655
        case 0x17f: /* movdqa ea, xmm */
2656
        case 0x27f: /* movdqu ea, xmm */
2657
            if (mod != 3) {
2658
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2659
                gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2660
            } else {
2661
                rm = (modrm & 7) | REX_B(s);
2662
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
2663
                            offsetof(CPUX86State,xmm_regs[reg]));
2664
            }
2665
            break;
2666
        case 0x211: /* movss ea, xmm */
2667
            if (mod != 3) {
2668
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2669
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2670
                gen_op_st_T0_A0[OT_LONG + s->mem_index]();
2671
            } else {
2672
                rm = (modrm & 7) | REX_B(s);
2673
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
2674
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2675
            }
2676
            break;
2677
        case 0x311: /* movsd ea, xmm */
2678
            if (mod != 3) {
2679
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2680
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2681
            } else {
2682
                rm = (modrm & 7) | REX_B(s);
2683
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2684
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2685
            }
2686
            break;
2687
        case 0x013: /* movlps */
2688
        case 0x113: /* movlpd */
2689
            if (mod != 3) {
2690
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2691
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2692
            } else {
2693
                goto illegal_op;
2694
            }
2695
            break;
2696
        case 0x017: /* movhps */
2697
        case 0x117: /* movhpd */
2698
            if (mod != 3) {
2699
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2700
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2701
            } else {
2702
                goto illegal_op;
2703
            }
2704
            break;
2705
        case 0x71: /* shift mm, im */
2706
        case 0x72:
2707
        case 0x73:
2708
        case 0x171: /* shift xmm, im */
2709
        case 0x172:
2710
        case 0x173:
2711
            val = ldub_code(s->pc++);
2712
            if (is_xmm) {
2713
                gen_op_movl_T0_im(val);
2714
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2715
                gen_op_movl_T0_0();
2716
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
2717
                op1_offset = offsetof(CPUX86State,xmm_t0);
2718
            } else {
2719
                gen_op_movl_T0_im(val);
2720
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
2721
                gen_op_movl_T0_0();
2722
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
2723
                op1_offset = offsetof(CPUX86State,mmx_t0);
2724
            }
2725
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
2726
            if (!sse_op2)
2727
                goto illegal_op;
2728
            if (is_xmm) {
2729
                rm = (modrm & 7) | REX_B(s);
2730
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2731
            } else {
2732
                rm = (modrm & 7);
2733
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2734
            }
2735
            sse_op2(op2_offset, op1_offset);
2736
            break;
2737
        case 0x050: /* movmskps */
2738
            rm = (modrm & 7) | REX_B(s);
2739
            gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
2740
            gen_op_mov_reg_T0[OT_LONG][reg]();
2741
            break;
2742
        case 0x150: /* movmskpd */
2743
            rm = (modrm & 7) | REX_B(s);
2744
            gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
2745
            gen_op_mov_reg_T0[OT_LONG][reg]();
2746
            break;
2747
        case 0x02a: /* cvtpi2ps */
2748
        case 0x12a: /* cvtpi2pd */
2749
            gen_op_enter_mmx();
2750
            if (mod != 3) {
2751
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2752
                op2_offset = offsetof(CPUX86State,mmx_t0);
2753
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
2754
            } else {
2755
                rm = (modrm & 7);
2756
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2757
            }
2758
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2759
            switch(b >> 8) {
2760
            case 0x0:
2761
                gen_op_cvtpi2ps(op1_offset, op2_offset);
2762
                break;
2763
            default:
2764
            case 0x1:
2765
                gen_op_cvtpi2pd(op1_offset, op2_offset);
2766
                break;
2767
            }
2768
            break;
2769
        case 0x22a: /* cvtsi2ss */
2770
        case 0x32a: /* cvtsi2sd */
2771
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
2772
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
2773
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2774
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
2775
            break;
2776
        case 0x02c: /* cvttps2pi */
2777
        case 0x12c: /* cvttpd2pi */
2778
        case 0x02d: /* cvtps2pi */
2779
        case 0x12d: /* cvtpd2pi */
2780
            gen_op_enter_mmx();
2781
            if (mod != 3) {
2782
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2783
                op2_offset = offsetof(CPUX86State,xmm_t0);
2784
                gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
2785
            } else {
2786
                rm = (modrm & 7) | REX_B(s);
2787
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2788
            }
2789
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
2790
            switch(b) {
2791
            case 0x02c:
2792
                gen_op_cvttps2pi(op1_offset, op2_offset);
2793
                break;
2794
            case 0x12c:
2795
                gen_op_cvttpd2pi(op1_offset, op2_offset);
2796
                break;
2797
            case 0x02d:
2798
                gen_op_cvtps2pi(op1_offset, op2_offset);
2799
                break;
2800
            case 0x12d:
2801
                gen_op_cvtpd2pi(op1_offset, op2_offset);
2802
                break;
2803
            }
2804
            break;
2805
        case 0x22c: /* cvttss2si */
2806
        case 0x32c: /* cvttsd2si */
2807
        case 0x22d: /* cvtss2si */
2808
        case 0x32d: /* cvtsd2si */
2809
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
2810
            if (mod != 3) {
2811
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2812
                if ((b >> 8) & 1) {
2813
                    gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
2814
                } else {
2815
                    gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2816
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2817
                }
2818
                op2_offset = offsetof(CPUX86State,xmm_t0);
2819
            } else {
2820
                rm = (modrm & 7) | REX_B(s);
2821
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2822
            }
2823
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 + 
2824
                          (b & 1) * 4](op2_offset);
2825
            gen_op_mov_reg_T0[ot][reg]();
2826
            break;
2827
        case 0xc4: /* pinsrw */
2828
        case 0x1c4: 
2829
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
2830
            val = ldub_code(s->pc++);
2831
            if (b1) {
2832
                val &= 7;
2833
                gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
2834
            } else {
2835
                val &= 3;
2836
                gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
2837
            }
2838
            break;
2839
        case 0xc5: /* pextrw */
2840
        case 0x1c5: 
2841
            if (mod != 3)
2842
                goto illegal_op;
2843
            val = ldub_code(s->pc++);
2844
            if (b1) {
2845
                val &= 7;
2846
                rm = (modrm & 7) | REX_B(s);
2847
                gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
2848
            } else {
2849
                val &= 3;
2850
                rm = (modrm & 7);
2851
                gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
2852
            }
2853
            reg = ((modrm >> 3) & 7) | rex_r;
2854
            gen_op_mov_reg_T0[OT_LONG][reg]();
2855
            break;
2856
        case 0x1d6: /* movq ea, xmm */
2857
            if (mod != 3) {
2858
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2859
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2860
            } else {
2861
                rm = (modrm & 7) | REX_B(s);
2862
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2863
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2864
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2865
            }
2866
            break;
2867
        case 0x2d6: /* movq2dq */
2868
            gen_op_enter_mmx();
2869
            rm = (modrm & 7) | REX_B(s);
2870
            gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2871
                        offsetof(CPUX86State,fpregs[reg & 7].mmx));
2872
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2873
            break;
2874
        case 0x3d6: /* movdq2q */
2875
            gen_op_enter_mmx();
2876
            rm = (modrm & 7);
2877
            gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2878
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2879
            break;
2880
        case 0xd7: /* pmovmskb */
2881
        case 0x1d7:
2882
            if (mod != 3)
2883
                goto illegal_op;
2884
            if (b1) {
2885
                rm = (modrm & 7) | REX_B(s);
2886
                gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
2887
            } else {
2888
                rm = (modrm & 7);
2889
                gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
2890
            }
2891
            reg = ((modrm >> 3) & 7) | rex_r;
2892
            gen_op_mov_reg_T0[OT_LONG][reg]();
2893
            break;
2894
        default:
2895
            goto illegal_op;
2896
        }
2897
    } else {
2898
        /* generic MMX or SSE operation */
2899
        if (b == 0xf7) {
2900
            /* maskmov : we must prepare A0 */
2901
            if (mod != 3) 
2902
                goto illegal_op;
2903
#ifdef TARGET_X86_64
2904
            if (CODE64(s)) {
2905
                gen_op_movq_A0_reg[R_EDI]();
2906
            } else 
2907
#endif
2908
            {
2909
                gen_op_movl_A0_reg[R_EDI]();
2910
                if (s->aflag == 0)
2911
                    gen_op_andl_A0_ffff();
2912
            }
2913
            gen_add_A0_ds_seg(s);
2914
        }
2915
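        /* op1_offset/op2_offset are offsets into CPUX86State of the
           destination register and of the source operand (a register,
           or a temporary loaded from memory) */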
        if (is_xmm) {
2916
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2917
            if (mod != 3) {
2918
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2919
                op2_offset = offsetof(CPUX86State,xmm_t0);
2920
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f) ||
2921
                                b == 0xc2)) {
2922
                    /* specific case for SSE single instructions */
2923
                    if (b1 == 2) {
2924
                        /* 32 bit access */
2925
                        gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2926
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2927
                    } else {
2928
                        /* 64 bit access */
2929
                        gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
2930
                    }
2931
                } else {
2932
                    gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
2933
                }
2934
            } else {
2935
                rm = (modrm & 7) | REX_B(s);
2936
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2937
            }
2938
        } else {
2939
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
2940
            if (mod != 3) {
2941
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2942
                op2_offset = offsetof(CPUX86State,mmx_t0);
2943
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
2944
            } else {
2945
                rm = (modrm & 7);
2946
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2947
            }
2948
        }
2949
        switch(b) {
2950
        case 0x70: /* pshufx insn */
2951
        case 0xc6: /* pshufx insn */
2952
            val = ldub_code(s->pc++);
2953
            sse_op3 = (GenOpFunc3 *)sse_op2;
2954
            sse_op3(op1_offset, op2_offset, val);
2955
            break;
2956
        case 0xc2:
2957
            /* compare insns */
2958
            val = ldub_code(s->pc++);
2959
            if (val >= 8)
2960
                goto illegal_op;
2961
            sse_op2 = sse_op_table4[val][b1];
2962
            sse_op2(op1_offset, op2_offset);
2963
            break;
2964
        default:
2965
            sse_op2(op1_offset, op2_offset);
2966
            break;
2967
        }
2968
        if (b == 0x2e || b == 0x2f) {
2969
            s->cc_op = CC_OP_EFLAGS;
2970
        }
2971
    }
2972
}
2973

    
2974

    
2975
/* convert one instruction. s->is_jmp is set if the translation must
2976
   be stopped. Return the next pc value */
2977
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
2978
{
2979
    int b, prefixes, aflag, dflag;
2980
    int shift, ot;
2981
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
2982
    target_ulong next_eip, tval;
2983
    int rex_w, rex_r;
2984

    
2985
    s->pc = pc_start;
2986
    prefixes = 0;
2987
    aflag = s->code32;
2988
    dflag = s->code32;
2989
    s->override = -1;
2990
    rex_w = -1;
2991
    rex_r = 0;
2992
#ifdef TARGET_X86_64
2993
    s->rex_x = 0;
2994
    s->rex_b = 0;
2995
    x86_64_hregs = 0; 
2996
#endif
2997
    s->rip_offset = 0; /* for relative ip address */
2998
 next_byte:
2999
    b = ldub_code(s->pc);
3000
    s->pc++;
3001
    /* check prefixes */
3002
#ifdef TARGET_X86_64
3003
    if (CODE64(s)) {
3004
        switch (b) {
3005
        case 0xf3:
3006
            prefixes |= PREFIX_REPZ;
3007
            goto next_byte;
3008
        case 0xf2:
3009
            prefixes |= PREFIX_REPNZ;
3010
            goto next_byte;
3011
        case 0xf0:
3012
            prefixes |= PREFIX_LOCK;
3013
            goto next_byte;
3014
        case 0x2e:
3015
            s->override = R_CS;
3016
            goto next_byte;
3017
        case 0x36:
3018
            s->override = R_SS;
3019
            goto next_byte;
3020
        case 0x3e:
3021
            s->override = R_DS;
3022
            goto next_byte;
3023
        case 0x26:
3024
            s->override = R_ES;
3025
            goto next_byte;
3026
        case 0x64:
3027
            s->override = R_FS;
3028
            goto next_byte;
3029
        case 0x65:
3030
            s->override = R_GS;
3031
            goto next_byte;
3032
        case 0x66:
3033
            prefixes |= PREFIX_DATA;
3034
            goto next_byte;
3035
        case 0x67:
3036
            prefixes |= PREFIX_ADR;
3037
            goto next_byte;
3038
        case 0x40 ... 0x4f:
3039
            /* REX prefix */
3040
            rex_w = (b >> 3) & 1;
3041
            rex_r = (b & 0x4) << 1;
3042
            s->rex_x = (b & 0x2) << 2;
3043
            REX_B(s) = (b & 0x1) << 3;
3044
            x86_64_hregs = 1; /* select uniform byte register addressing */
3045
            goto next_byte;
3046
        }
3047
        if (rex_w == 1) {
3048
            /* 0x66 is ignored if rex.w is set */
3049
            dflag = 2;
3050
        } else {
3051
            if (prefixes & PREFIX_DATA)
3052
                dflag ^= 1;
3053
        }
3054
        if (!(prefixes & PREFIX_ADR))
3055
            aflag = 2;
3056
    } else 
3057
#endif
3058
    {
3059
        switch (b) {
3060
        case 0xf3:
3061
            prefixes |= PREFIX_REPZ;
3062
            goto next_byte;
3063
        case 0xf2:
3064
            prefixes |= PREFIX_REPNZ;
3065
            goto next_byte;
3066
        case 0xf0:
3067
            prefixes |= PREFIX_LOCK;
3068
            goto next_byte;
3069
        case 0x2e:
3070
            s->override = R_CS;
3071
            goto next_byte;
3072
        case 0x36:
3073
            s->override = R_SS;
3074
            goto next_byte;
3075
        case 0x3e:
3076
            s->override = R_DS;
3077
            goto next_byte;
3078
        case 0x26:
3079
            s->override = R_ES;
3080
            goto next_byte;
3081
        case 0x64:
3082
            s->override = R_FS;
3083
            goto next_byte;
3084
        case 0x65:
3085
            s->override = R_GS;
3086
            goto next_byte;
3087
        case 0x66:
3088
            prefixes |= PREFIX_DATA;
3089
            goto next_byte;
3090
        case 0x67:
3091
            prefixes |= PREFIX_ADR;
3092
            goto next_byte;
3093
        }
3094
        if (prefixes & PREFIX_DATA)
3095
            dflag ^= 1;
3096
        if (prefixes & PREFIX_ADR)
3097
            aflag ^= 1;
3098
    }
3099

    
3100
    s->prefix = prefixes;
3101
    s->aflag = aflag;
3102
    s->dflag = dflag;
3103

    
3104
    /* lock generation */
3105
    if (prefixes & PREFIX_LOCK)
3106
        gen_op_lock();
3107

    
3108
    /* now check op code */
3109
 reswitch:
3110
    switch(b) {
3111
    case 0x0f:
3112
        /**************************/
3113
        /* extended op code */
3114
        b = ldub_code(s->pc++) | 0x100;
3115
        goto reswitch;
3116
        
3117
        /**************************/
3118
        /* arith & logic */
3119
    case 0x00 ... 0x05:
3120
    case 0x08 ... 0x0d:
3121
    case 0x10 ... 0x15:
3122
    case 0x18 ... 0x1d:
3123
    case 0x20 ... 0x25:
3124
    case 0x28 ... 0x2d:
3125
    case 0x30 ... 0x35:
3126
    case 0x38 ... 0x3d:
3127
        {
3128
            int op, f, val;
3129
            op = (b >> 3) & 7;
3130
            f = (b >> 1) & 3;
3131

    
3132
            if ((b & 1) == 0)
3133
                ot = OT_BYTE;
3134
            else
3135
                ot = dflag + OT_WORD;
3136
            
3137
            switch(f) {
3138
            case 0: /* OP Ev, Gv */
3139
                modrm = ldub_code(s->pc++);
3140
                reg = ((modrm >> 3) & 7) | rex_r;
3141
                mod = (modrm >> 6) & 3;
3142
                rm = (modrm & 7) | REX_B(s);
3143
                if (mod != 3) {
3144
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3145
                    opreg = OR_TMP0;
3146
                } else if (op == OP_XORL && rm == reg) {
3147
                xor_zero:
3148
                    /* xor reg, reg optimisation */
3149
                    gen_op_movl_T0_0();
3150
                    s->cc_op = CC_OP_LOGICB + ot;
3151
                    gen_op_mov_reg_T0[ot][reg]();
3152
                    gen_op_update1_cc();
3153
                    break;
3154
                } else {
3155
                    opreg = rm;
3156
                }
3157
                gen_op_mov_TN_reg[ot][1][reg]();
3158
                gen_op(s, op, ot, opreg);
3159
                break;
3160
            case 1: /* OP Gv, Ev */
3161
                modrm = ldub_code(s->pc++);
3162
                mod = (modrm >> 6) & 3;
3163
                reg = ((modrm >> 3) & 7) | rex_r;
3164
                rm = (modrm & 7) | REX_B(s);
3165
                if (mod != 3) {
3166
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3167
                    gen_op_ld_T1_A0[ot + s->mem_index]();
3168
                } else if (op == OP_XORL && rm == reg) {
3169
                    goto xor_zero;
3170
                } else {
3171
                    gen_op_mov_TN_reg[ot][1][rm]();
3172
                }
3173
                gen_op(s, op, ot, reg);
3174
                break;
3175
            case 2: /* OP A, Iv */
3176
                val = insn_get(s, ot);
3177
                gen_op_movl_T1_im(val);
3178
                gen_op(s, op, ot, OR_EAX);
3179
                break;
3180
            }
3181
        }
3182
        break;
3183

    
3184
    case 0x80: /* GRP1 */
3185
    case 0x81:
3186
    case 0x82:
3187
    case 0x83:
3188
        {
3189
            int val;
3190

    
3191
            if ((b & 1) == 0)
3192
                ot = OT_BYTE;
3193
            else
3194
                ot = dflag + OT_WORD;
3195
            
3196
            modrm = ldub_code(s->pc++);
3197
            mod = (modrm >> 6) & 3;
3198
            rm = (modrm & 7) | REX_B(s);
3199
            op = (modrm >> 3) & 7;
3200
            
3201
            if (mod != 3) {
3202
                if (b == 0x83)
3203
                    s->rip_offset = 1;
3204
                else
3205
                    s->rip_offset = insn_const_size(ot);
3206
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3207
                opreg = OR_TMP0;
3208
            } else {
3209
                opreg = rm;
3210
            }
3211

    
3212
            switch(b) {
3213
            default:
3214
            case 0x80:
3215
            case 0x81:
3216
            case 0x82:
3217
                val = insn_get(s, ot);
3218
                break;
3219
            case 0x83:
3220
                val = (int8_t)insn_get(s, OT_BYTE);
3221
                break;
3222
            }
3223
            gen_op_movl_T1_im(val);
3224
            gen_op(s, op, ot, opreg);
3225
        }
3226
        break;
3227

    
3228
        /**************************/
3229
        /* inc, dec, and other misc arith */
3230
    case 0x40 ... 0x47: /* inc Gv */
3231
        ot = dflag ? OT_LONG : OT_WORD;
3232
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
3233
        break;
3234
    case 0x48 ... 0x4f: /* dec Gv */
3235
        ot = dflag ? OT_LONG : OT_WORD;
3236
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
3237
        break;
3238
    case 0xf6: /* GRP3 */
3239
    case 0xf7:
3240
        if ((b & 1) == 0)
3241
            ot = OT_BYTE;
3242
        else
3243
            ot = dflag + OT_WORD;
3244

    
3245
        modrm = ldub_code(s->pc++);
3246
        mod = (modrm >> 6) & 3;
3247
        rm = (modrm & 7) | REX_B(s);
3248
        op = (modrm >> 3) & 7;
3249
        if (mod != 3) {
3250
            if (op == 0)
3251
                s->rip_offset = insn_const_size(ot);
3252
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3253
            gen_op_ld_T0_A0[ot + s->mem_index]();
3254
        } else {
3255
            gen_op_mov_TN_reg[ot][0][rm]();
3256
        }
3257

    
3258
        switch(op) {
3259
        case 0: /* test */
3260
            val = insn_get(s, ot);
3261
            gen_op_movl_T1_im(val);
3262
            gen_op_testl_T0_T1_cc();
3263
            s->cc_op = CC_OP_LOGICB + ot;
3264
            break;
3265
        case 2: /* not */
3266
            gen_op_notl_T0();
3267
            if (mod != 3) {
3268
                gen_op_st_T0_A0[ot + s->mem_index]();
3269
            } else {
3270
                gen_op_mov_reg_T0[ot][rm]();
3271
            }
3272
            break;
3273
        case 3: /* neg */
3274
            gen_op_negl_T0();
3275
            if (mod != 3) {
3276
                gen_op_st_T0_A0[ot + s->mem_index]();
3277
            } else {
3278
                gen_op_mov_reg_T0[ot][rm]();
3279
            }
3280
            gen_op_update_neg_cc();
3281
            s->cc_op = CC_OP_SUBB + ot;
3282
            break;
3283
        case 4: /* mul */
3284
            switch(ot) {
3285
            case OT_BYTE:
3286
                gen_op_mulb_AL_T0();
3287
                s->cc_op = CC_OP_MULB;
3288
                break;
3289
            case OT_WORD:
3290
                gen_op_mulw_AX_T0();
3291
                s->cc_op = CC_OP_MULW;
3292
                break;
3293
            default:
3294
            case OT_LONG:
3295
                gen_op_mull_EAX_T0();
3296
                s->cc_op = CC_OP_MULL;
3297
                break;
3298
#ifdef TARGET_X86_64
3299
            case OT_QUAD:
3300
                gen_op_mulq_EAX_T0();
3301
                s->cc_op = CC_OP_MULQ;
3302
                break;
3303
#endif
3304
            }
3305
            break;
3306
        case 5: /* imul */
3307
            switch(ot) {
3308
            case OT_BYTE:
3309
                gen_op_imulb_AL_T0();
3310
                s->cc_op = CC_OP_MULB;
3311
                break;
3312
            case OT_WORD:
3313
                gen_op_imulw_AX_T0();
3314
                s->cc_op = CC_OP_MULW;
3315
                break;
3316
            default:
3317
            case OT_LONG:
3318
                gen_op_imull_EAX_T0();
3319
                s->cc_op = CC_OP_MULL;
3320
                break;
3321
#ifdef TARGET_X86_64
3322
            case OT_QUAD:
3323
                gen_op_imulq_EAX_T0();
3324
                s->cc_op = CC_OP_MULQ;
3325
                break;
3326
#endif
3327
            }
3328
            break;
3329
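        /* division can fault (#DE), so EIP is brought up to date before
           calling the helpers */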
        case 6: /* div */
3330
            switch(ot) {
3331
            case OT_BYTE:
3332
                gen_jmp_im(pc_start - s->cs_base);
3333
                gen_op_divb_AL_T0();
3334
                break;
3335
            case OT_WORD:
3336
                gen_jmp_im(pc_start - s->cs_base);
3337
                gen_op_divw_AX_T0();
3338
                break;
3339
            default:
3340
            case OT_LONG:
3341
                gen_jmp_im(pc_start - s->cs_base);
3342
                gen_op_divl_EAX_T0();
3343
                break;
3344
#ifdef TARGET_X86_64
3345
            case OT_QUAD:
3346
                gen_jmp_im(pc_start - s->cs_base);
3347
                gen_op_divq_EAX_T0();
3348
                break;
3349
#endif
3350
            }
3351
            break;
3352
        case 7: /* idiv */
3353
            switch(ot) {
3354
            case OT_BYTE:
3355
                gen_jmp_im(pc_start - s->cs_base);
3356
                gen_op_idivb_AL_T0();
3357
                break;
3358
            case OT_WORD:
3359
                gen_jmp_im(pc_start - s->cs_base);
3360
                gen_op_idivw_AX_T0();
3361
                break;
3362
            default:
3363
            case OT_LONG:
3364
                gen_jmp_im(pc_start - s->cs_base);
3365
                gen_op_idivl_EAX_T0();
3366
                break;
3367
#ifdef TARGET_X86_64
3368
            case OT_QUAD:
3369
                gen_jmp_im(pc_start - s->cs_base);
3370
                gen_op_idivq_EAX_T0();
3371
                break;
3372
#endif
3373
            }
3374
            break;
3375
        default:
3376
            goto illegal_op;
3377
        }
3378
        break;
3379

    
3380
    case 0xfe: /* GRP4 */
3381
    case 0xff: /* GRP5 */
3382
        if ((b & 1) == 0)
3383
            ot = OT_BYTE;
3384
        else
3385
            ot = dflag + OT_WORD;
3386

    
3387
        modrm = ldub_code(s->pc++);
3388
        mod = (modrm >> 6) & 3;
3389
        rm = (modrm & 7) | REX_B(s);
3390
        op = (modrm >> 3) & 7;
3391
        if (op >= 2 && b == 0xfe) {
3392
            goto illegal_op;
3393
        }
3394
        if (CODE64(s)) {
3395
            if (op == 2 || op == 4) {
3396
                /* the operand size of indirect call/jmp is forced to 64 bit */
3397
                ot = OT_QUAD;
3398
            } else if (op == 3 || op == 5) {
3399
                /* for lcall/ljmp, the operand is 16 or 32 bit, even
3400
                   in long mode */
3401
                ot = dflag ? OT_LONG : OT_WORD;
3402
            } else if (op == 6) {
3403
                /* default push size is 64 bit */
3404
                ot = dflag ? OT_QUAD : OT_WORD;
3405
            }
3406
        }
3407
        if (mod != 3) {
3408
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3409
            if (op >= 2 && op != 3 && op != 5)
3410
                gen_op_ld_T0_A0[ot + s->mem_index]();
3411
        } else {
3412
            gen_op_mov_TN_reg[ot][0][rm]();
3413
        }
3414

    
3415
        switch(op) {
3416
        case 0: /* inc Ev */
3417
            if (mod != 3)
3418
                opreg = OR_TMP0;
3419
            else
3420
                opreg = rm;
3421
            gen_inc(s, ot, opreg, 1);
3422
            break;
3423
        case 1: /* dec Ev */
3424
            if (mod != 3)
3425
                opreg = OR_TMP0;
3426
            else
3427
                opreg = rm;
3428
            gen_inc(s, ot, opreg, -1);
3429
            break;
3430
        case 2: /* call Ev */
3431
            /* XXX: optimize the memory operand case (the 'and' is not necessary) */
3432
            if (s->dflag == 0)
3433
                gen_op_andl_T0_ffff();
3434
            next_eip = s->pc - s->cs_base;
3435
            gen_movtl_T1_im(next_eip);
3436
            gen_push_T1(s);
3437
            gen_op_jmp_T0();
3438
            gen_eob(s);
3439
            break;
3440
        case 3: /* lcall Ev */
3441
            gen_op_ld_T1_A0[ot + s->mem_index]();
3442
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3443
            gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3444
        do_lcall:
3445
            if (s->pe && !s->vm86) {
3446
                if (s->cc_op != CC_OP_DYNAMIC)
3447
                    gen_op_set_cc_op(s->cc_op);
3448
                gen_jmp_im(pc_start - s->cs_base);
3449
                gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3450
            } else {
3451
                gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3452
            }
3453
            gen_eob(s);
3454
            break;
3455
        case 4: /* jmp Ev */
3456
            if (s->dflag == 0)
3457
                gen_op_andl_T0_ffff();
3458
            gen_op_jmp_T0();
3459
            gen_eob(s);
3460
            break;
3461
        case 5: /* ljmp Ev */
3462
            gen_op_ld_T1_A0[ot + s->mem_index]();
3463
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3464
            gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3465
        do_ljmp:
3466
            if (s->pe && !s->vm86) {
3467
                if (s->cc_op != CC_OP_DYNAMIC)
3468
                    gen_op_set_cc_op(s->cc_op);
3469
                gen_jmp_im(pc_start - s->cs_base);
3470
                gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3471
            } else {
3472
                gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3473
                gen_op_movl_T0_T1();
3474
                gen_op_jmp_T0();
3475
            }
3476
            gen_eob(s);
3477
            break;
3478
        case 6: /* push Ev */
3479
            gen_push_T0(s);
3480
            break;
3481
        default:
3482
            goto illegal_op;
3483
        }
3484
        break;
3485

    
3486
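    /* TEST computes the AND of its operands only to set the flags; the
       result itself is discarded and the lazy flag state is marked as a
       logic op of the given width. */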
    case 0x84: /* test Ev, Gv */
3487
    case 0x85: 
3488
        if ((b & 1) == 0)
3489
            ot = OT_BYTE;
3490
        else
3491
            ot = dflag + OT_WORD;
3492

    
3493
        modrm = ldub_code(s->pc++);
3494
        mod = (modrm >> 6) & 3;
3495
        rm = (modrm & 7) | REX_B(s);
3496
        reg = ((modrm >> 3) & 7) | rex_r;
3497
        
3498
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3499
        gen_op_mov_TN_reg[ot][1][reg]();
3500
        gen_op_testl_T0_T1_cc();
3501
        s->cc_op = CC_OP_LOGICB + ot;
3502
        break;
3503
        
3504
    case 0xa8: /* test eAX, Iv */
3505
    case 0xa9:
3506
        if ((b & 1) == 0)
3507
            ot = OT_BYTE;
3508
        else
3509
            ot = dflag + OT_WORD;
3510
        val = insn_get(s, ot);
3511

    
3512
        gen_op_mov_TN_reg[ot][0][OR_EAX]();
3513
        gen_op_movl_T1_im(val);
3514
        gen_op_testl_T0_T1_cc();
3515
        s->cc_op = CC_OP_LOGICB + ot;
3516
        break;
3517
        
3518
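    /* CBW/CWDE/CDQE: sign extend AL into AX, AX into EAX or EAX into RAX
       depending on the current operand size. */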
    case 0x98: /* CWDE/CBW */
3519
#ifdef TARGET_X86_64
3520
        if (dflag == 2) {
3521
            gen_op_movslq_RAX_EAX();
3522
        } else
3523
#endif
3524
        if (dflag == 1)
3525
            gen_op_movswl_EAX_AX();
3526
        else
3527
            gen_op_movsbw_AX_AL();
3528
        break;
3529
    case 0x99: /* CDQ/CWD */
3530
#ifdef TARGET_X86_64
3531
        if (dflag == 2) {
3532
            gen_op_movsqo_RDX_RAX();
3533
        } else
3534
#endif
3535
        if (dflag == 1)
3536
            gen_op_movslq_EDX_EAX();
3537
        else
3538
            gen_op_movswl_DX_AX();
3539
        break;
3540
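    /* two and three operand IMUL: 0f af multiplies Gv by Ev, while 69/6b
       take an immediate (full width or sign-extended byte) as the second
       factor; rip_offset accounts for that immediate when a RIP-relative
       address is decoded. */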
    case 0x1af: /* imul Gv, Ev */
3541
    case 0x69: /* imul Gv, Ev, I */
3542
    case 0x6b:
3543
        ot = dflag + OT_WORD;
3544
        modrm = ldub_code(s->pc++);
3545
        reg = ((modrm >> 3) & 7) | rex_r;
3546
        if (b == 0x69)
3547
            s->rip_offset = insn_const_size(ot);
3548
        else if (b == 0x6b)
3549
            s->rip_offset = 1;
3550
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3551
        if (b == 0x69) {
3552
            val = insn_get(s, ot);
3553
            gen_op_movl_T1_im(val);
3554
        } else if (b == 0x6b) {
3555
            val = (int8_t)insn_get(s, OT_BYTE);
3556
            gen_op_movl_T1_im(val);
3557
        } else {
3558
            gen_op_mov_TN_reg[ot][1][reg]();
3559
        }
3560

    
3561
#ifdef TARGET_X86_64
3562
        if (ot == OT_QUAD) {
3563
            gen_op_imulq_T0_T1();
3564
        } else
3565
#endif
3566
        if (ot == OT_LONG) {
3567
            gen_op_imull_T0_T1();
3568
        } else {
3569
            gen_op_imulw_T0_T1();
3570
        }
3571
        gen_op_mov_reg_T0[ot][reg]();
3572
        s->cc_op = CC_OP_MULB + ot;
3573
        break;
3574
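    /* XADD: the register operand receives the old destination value while
       the destination receives the sum; the flags are those of the add. */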
    case 0x1c0:
3575
    case 0x1c1: /* xadd Ev, Gv */
3576
        if ((b & 1) == 0)
3577
            ot = OT_BYTE;
3578
        else
3579
            ot = dflag + OT_WORD;
3580
        modrm = ldub_code(s->pc++);
3581
        reg = ((modrm >> 3) & 7) | rex_r;
3582
        mod = (modrm >> 6) & 3;
3583
        if (mod == 3) {
3584
            rm = (modrm & 7) | REX_B(s);
3585
            gen_op_mov_TN_reg[ot][0][reg]();
3586
            gen_op_mov_TN_reg[ot][1][rm]();
3587
            gen_op_addl_T0_T1();
3588
            gen_op_mov_reg_T1[ot][reg]();
3589
            gen_op_mov_reg_T0[ot][rm]();
3590
        } else {
3591
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3592
            gen_op_mov_TN_reg[ot][0][reg]();
3593
            gen_op_ld_T1_A0[ot + s->mem_index]();
3594
            gen_op_addl_T0_T1();
3595
            gen_op_st_T0_A0[ot + s->mem_index]();
3596
            gen_op_mov_reg_T1[ot][reg]();
3597
        }
3598
        gen_op_update2_cc();
3599
        s->cc_op = CC_OP_ADDB + ot;
3600
        break;
3601
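    /* CMPXCHG: compare EAX/eAX with the destination and store either the
       source register or the old value depending on the result; the flags
       are those of the comparison, hence CC_OP_SUBB + ot. */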
    case 0x1b0:
3602
    case 0x1b1: /* cmpxchg Ev, Gv */
3603
        if ((b & 1) == 0)
3604
            ot = OT_BYTE;
3605
        else
3606
            ot = dflag + OT_WORD;
3607
        modrm = ldub_code(s->pc++);
3608
        reg = ((modrm >> 3) & 7) | rex_r;
3609
        mod = (modrm >> 6) & 3;
3610
        gen_op_mov_TN_reg[ot][1][reg]();
3611
        if (mod == 3) {
3612
            rm = (modrm & 7) | REX_B(s);
3613
            gen_op_mov_TN_reg[ot][0][rm]();
3614
            gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3615
            gen_op_mov_reg_T0[ot][rm]();
3616
        } else {
3617
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3618
            gen_op_ld_T0_A0[ot + s->mem_index]();
3619
            gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3620
        }
3621
        s->cc_op = CC_OP_SUBB + ot;
3622
        break;
3623
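    /* CMPXCHG8B only accepts a memory operand; EFLAGS are computed inside
       the helper, so the lazy condition codes are flushed first. */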
    case 0x1c7: /* cmpxchg8b */
3624
        modrm = ldub_code(s->pc++);
3625
        mod = (modrm >> 6) & 3;
3626
        if (mod == 3)
3627
            goto illegal_op;
3628
        if (s->cc_op != CC_OP_DYNAMIC)
3629
            gen_op_set_cc_op(s->cc_op);
3630
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3631
        gen_op_cmpxchg8b();
3632
        s->cc_op = CC_OP_EFLAGS;
3633
        break;
3634
        
3635
        /**************************/
3636
        /* push/pop */
3637
    case 0x50 ... 0x57: /* push */
3638
        gen_op_mov_TN_reg[OT_LONG][0][(b & 7) | REX_B(s)]();
3639
        gen_push_T0(s);
3640
        break;
3641
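    /* in 64 bit mode the default push/pop operand size is 64 bit (16 bit
       with an operand size prefix); there is no 32 bit form. */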
    case 0x58 ... 0x5f: /* pop */
3642
        if (CODE64(s)) {
3643
            ot = dflag ? OT_QUAD : OT_WORD;
3644
        } else {
3645
            ot = dflag + OT_WORD;
3646
        }
3647
        gen_pop_T0(s);
3648
        /* NOTE: order is important for pop %sp */
3649
        gen_pop_update(s);
3650
        gen_op_mov_reg_T0[ot][(b & 7) | REX_B(s)]();
3651
        break;
3652
    case 0x60: /* pusha */
3653
        if (CODE64(s))
3654
            goto illegal_op;
3655
        gen_pusha(s);
3656
        break;
3657
    case 0x61: /* popa */
3658
        if (CODE64(s))
3659
            goto illegal_op;
3660
        gen_popa(s);
3661
        break;
3662
    case 0x68: /* push Iv */
3663
    case 0x6a:
3664
        if (CODE64(s)) {
3665
            ot = dflag ? OT_QUAD : OT_WORD;
3666
        } else {
3667
            ot = dflag + OT_WORD;
3668
        }
3669
        if (b == 0x68)
3670
            val = insn_get(s, ot);
3671
        else
3672
            val = (int8_t)insn_get(s, OT_BYTE);
3673
        gen_op_movl_T0_im(val);
3674
        gen_push_T0(s);
3675
        break;
3676
    case 0x8f: /* pop Ev */
3677
        if (CODE64(s)) {
3678
            ot = dflag ? OT_QUAD : OT_WORD;
3679
        } else {
3680
            ot = dflag + OT_WORD;
3681
        }
3682
        modrm = ldub_code(s->pc++);
3683
        mod = (modrm >> 6) & 3;
3684
        gen_pop_T0(s);
3685
        if (mod == 3) {
3686
            /* NOTE: order is important for pop %sp */
3687
            gen_pop_update(s);
3688
            rm = (modrm & 7) | REX_B(s);
3689
            gen_op_mov_reg_T0[ot][rm]();
3690
        } else {
3691
            /* NOTE: order is important too for MMU exceptions */
3692
            s->popl_esp_hack = 1 << ot;
3693
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3694
            s->popl_esp_hack = 0;
3695
            gen_pop_update(s);
3696
        }
3697
        break;
3698
    case 0xc8: /* enter */
3699
        {
3700
            /* XXX: long mode support */
3701
            int level;
3702
            val = lduw_code(s->pc);
3703
            s->pc += 2;
3704
            level = ldub_code(s->pc++);
3705
            gen_enter(s, val, level);
3706
        }
3707
        break;
3708
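    /* LEAVE: copy the frame pointer back into the stack pointer (using the
       stack segment width), then pop the saved frame pointer into EBP/RBP. */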
    case 0xc9: /* leave */
3709
        /* XXX: exception not precise (ESP is updated before potential exception) */
3710
        /* XXX: may be invalid for 16 bit in long mode */
3711
        if (CODE64(s)) {
3712
            gen_op_mov_TN_reg[OT_QUAD][0][R_EBP]();
3713
            gen_op_mov_reg_T0[OT_QUAD][R_ESP]();
3714
        } else if (s->ss32) {
3715
            gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
3716
            gen_op_mov_reg_T0[OT_LONG][R_ESP]();
3717
        } else {
3718
            gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
3719
            gen_op_mov_reg_T0[OT_WORD][R_ESP]();
3720
        }
3721
        gen_pop_T0(s);
3722
        if (CODE64(s)) {
3723
            ot = dflag ? OT_QUAD : OT_WORD;
3724
        } else {
3725
            ot = dflag + OT_WORD;
3726
        }
3727
        gen_op_mov_reg_T0[ot][R_EBP]();
3728
        gen_pop_update(s);
3729
        break;
3730
    case 0x06: /* push es */
3731
    case 0x0e: /* push cs */
3732
    case 0x16: /* push ss */
3733
    case 0x1e: /* push ds */
3734
        if (CODE64(s))
3735
            goto illegal_op;
3736
        gen_op_movl_T0_seg(b >> 3);
3737
        gen_push_T0(s);
3738
        break;
3739
    case 0x1a0: /* push fs */
3740
    case 0x1a8: /* push gs */
3741
        gen_op_movl_T0_seg((b >> 3) & 7);
3742
        gen_push_T0(s);
3743
        break;
3744
    case 0x07: /* pop es */
3745
    case 0x17: /* pop ss */
3746
    case 0x1f: /* pop ds */
3747
        if (CODE64(s))
3748
            goto illegal_op;
3749
        reg = b >> 3;
3750
        gen_pop_T0(s);
3751
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
3752
        gen_pop_update(s);
3753
        if (reg == R_SS) {
3754
            /* if reg == SS, inhibit interrupts/trace. */
3755
            /* If several instructions disable interrupts, only the
3756
               _first_ does it */
3757
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
3758
                gen_op_set_inhibit_irq();
3759
            s->tf = 0;
3760
        }
3761
        if (s->is_jmp) {
3762
            gen_jmp_im(s->pc - s->cs_base);
3763
            gen_eob(s);
3764
        }
3765
        break;
3766
    case 0x1a1: /* pop fs */
3767
    case 0x1a9: /* pop gs */
3768
        gen_pop_T0(s);
3769
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
3770
        gen_pop_update(s);
3771
        if (s->is_jmp) {
3772
            gen_jmp_im(s->pc - s->cs_base);
3773
            gen_eob(s);
3774
        }
3775
        break;
3776

    
3777
        /**************************/
3778
        /* mov */
3779
    case 0x88:
3780
    case 0x89: /* mov Gv, Ev */
3781
        if ((b & 1) == 0)
3782
            ot = OT_BYTE;
3783
        else
3784
            ot = dflag + OT_WORD;
3785
        modrm = ldub_code(s->pc++);
3786
        reg = ((modrm >> 3) & 7) | rex_r;
3787
        
3788
        /* generate a generic store */
3789
        gen_ldst_modrm(s, modrm, ot, reg, 1);
3790
        break;
3791
    case 0xc6:
3792
    case 0xc7: /* mov Ev, Iv */
3793
        if ((b & 1) == 0)
3794
            ot = OT_BYTE;
3795
        else
3796
            ot = dflag + OT_WORD;
3797
        modrm = ldub_code(s->pc++);
3798
        mod = (modrm >> 6) & 3;
3799
        if (mod != 3) {
3800
            s->rip_offset = insn_const_size(ot);
3801
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3802
        }
3803
        val = insn_get(s, ot);
3804
        gen_op_movl_T0_im(val);
3805
        if (mod != 3)
3806
            gen_op_st_T0_A0[ot + s->mem_index]();
3807
        else
3808
            gen_op_mov_reg_T0[ot][(modrm & 7) | REX_B(s)]();
3809
        break;
3810
    case 0x8a:
3811
    case 0x8b: /* mov Ev, Gv */
3812
        if ((b & 1) == 0)
3813
            ot = OT_BYTE;
3814
        else
3815
            ot = OT_WORD + dflag;
3816
        modrm = ldub_code(s->pc++);
3817
        reg = ((modrm >> 3) & 7) | rex_r;
3818
        
3819
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3820
        gen_op_mov_reg_T0[ot][reg]();
3821
        break;
3822
    case 0x8e: /* mov seg, Gv */
3823
        modrm = ldub_code(s->pc++);
3824
        reg = (modrm >> 3) & 7;
3825
        if (reg >= 6 || reg == R_CS)
3826
            goto illegal_op;
3827
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3828
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
3829
        if (reg == R_SS) {
3830
            /* if reg == SS, inhibit interrupts/trace */
3831
            /* If several instructions disable interrupts, only the
3832
               _first_ does it */
3833
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
3834
                gen_op_set_inhibit_irq();
3835
            s->tf = 0;
3836
        }
3837
        if (s->is_jmp) {
3838
            gen_jmp_im(s->pc - s->cs_base);
3839
            gen_eob(s);
3840
        }
3841
        break;
3842
    case 0x8c: /* mov Gv, seg */
3843
        modrm = ldub_code(s->pc++);
3844
        reg = (modrm >> 3) & 7;
3845
        mod = (modrm >> 6) & 3;
3846
        if (reg >= 6)
3847
            goto illegal_op;
3848
        gen_op_movl_T0_seg(reg);
3849
        if (mod == 3)
3850
            ot = OT_WORD + dflag;
3851
        else
3852
            ot = OT_WORD;
3853
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3854
        break;
3855

    
3856
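    /* MOVZX/MOVSX: bit 0 of the opcode gives the source width (byte or
       word) and bit 3 selects zero versus sign extension; the register
       path uses explicit extension ops, the memory path the corresponding
       unsigned/signed load ops. */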
    case 0x1b6: /* movzbS Gv, Eb */
3857
    case 0x1b7: /* movzwS Gv, Eb */
3858
    case 0x1be: /* movsbS Gv, Eb */
3859
    case 0x1bf: /* movswS Gv, Eb */
3860
        {
3861
            int d_ot;
3862
            /* d_ot is the size of the destination */
3863
            d_ot = dflag + OT_WORD;
3864
            /* ot is the size of the source */
3865
            ot = (b & 1) + OT_BYTE;
3866
            modrm = ldub_code(s->pc++);
3867
            reg = ((modrm >> 3) & 7) | rex_r;
3868
            mod = (modrm >> 6) & 3;
3869
            rm = (modrm & 7) | REX_B(s);
3870
            
3871
            if (mod == 3) {
3872
                gen_op_mov_TN_reg[ot][0][rm]();
3873
                switch(ot | (b & 8)) {
3874
                case OT_BYTE:
3875
                    gen_op_movzbl_T0_T0();
3876
                    break;
3877
                case OT_BYTE | 8:
3878
                    gen_op_movsbl_T0_T0();
3879
                    break;
3880
                case OT_WORD:
3881
                    gen_op_movzwl_T0_T0();
3882
                    break;
3883
                default:
3884
                case OT_WORD | 8:
3885
                    gen_op_movswl_T0_T0();
3886
                    break;
3887
                }
3888
                gen_op_mov_reg_T0[d_ot][reg]();
3889
            } else {
3890
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3891
                if (b & 8) {
3892
                    gen_op_lds_T0_A0[ot + s->mem_index]();
3893
                } else {
3894
                    gen_op_ldu_T0_A0[ot + s->mem_index]();
3895
                }
3896
                gen_op_mov_reg_T0[d_ot][reg]();
3897
            }
3898
        }
3899
        break;
3900

    
3901
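    /* LEA reuses the normal ModRM effective address computation but must
       not add any segment base, so addseg and the segment override are
       temporarily cleared. */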
    case 0x8d: /* lea */
3902
        ot = dflag + OT_WORD;
3903
        modrm = ldub_code(s->pc++);
3904
        mod = (modrm >> 6) & 3;
3905
        if (mod == 3)
3906
            goto illegal_op;
3907
        reg = ((modrm >> 3) & 7) | rex_r;
3908
        /* we must ensure that no segment is added */
3909
        s->override = -1;
3910
        val = s->addseg;
3911
        s->addseg = 0;
3912
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3913
        s->addseg = val;
3914
        gen_op_mov_reg_A0[ot - OT_WORD][reg]();
3915
        break;
3916
        
3917
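    /* moffs forms: the absolute address is encoded directly in the
       instruction (a full 64 bit value in long mode); gen_add_A0_ds_seg
       then applies the data segment base. */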
    case 0xa0: /* mov EAX, Ov */
3918
    case 0xa1:
3919
    case 0xa2: /* mov Ov, EAX */
3920
    case 0xa3:
3921
        {
3922
            target_ulong offset_addr;
3923

    
3924
            if ((b & 1) == 0)
3925
                ot = OT_BYTE;
3926
            else
3927
                ot = dflag + OT_WORD;
3928
#ifdef TARGET_X86_64
3929
            if (CODE64(s)) {
3930
                offset_addr = ldq_code(s->pc);
3931
                s->pc += 8;
3932
                if (offset_addr == (int32_t)offset_addr)
3933
                    gen_op_movq_A0_im(offset_addr);
3934
                else
3935
                    gen_op_movq_A0_im64(offset_addr >> 32, offset_addr);
3936
            } else 
3937
#endif
3938
            {
3939
                if (s->aflag) {
3940
                    offset_addr = insn_get(s, OT_LONG);
3941
                } else {
3942
                    offset_addr = insn_get(s, OT_WORD);
3943
                }
3944
                gen_op_movl_A0_im(offset_addr);
3945
            }
3946
            gen_add_A0_ds_seg(s);
3947
            if ((b & 2) == 0) {
3948
                gen_op_ld_T0_A0[ot + s->mem_index]();
3949
                gen_op_mov_reg_T0[ot][R_EAX]();
3950
            } else {
3951
                gen_op_mov_TN_reg[ot][0][R_EAX]();
3952
                gen_op_st_T0_A0[ot + s->mem_index]();
3953
            }
3954
        }
3955
        break;
3956
    case 0xd7: /* xlat */
3957
#ifdef TARGET_X86_64
3958
        if (CODE64(s)) {
3959
            gen_op_movq_A0_reg[R_EBX]();
3960
            gen_op_addq_A0_AL();
3961
        } else 
3962
#endif
3963
        {
3964
            gen_op_movl_A0_reg[R_EBX]();
3965
            gen_op_addl_A0_AL();
3966
            if (s->aflag == 0)
3967
                gen_op_andl_A0_ffff();
3968
        }
3969
        gen_add_A0_ds_seg(s);
3970
        gen_op_ldu_T0_A0[OT_BYTE + s->mem_index]();
3971
        gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
3972
        break;
3973
    case 0xb0 ... 0xb7: /* mov R, Ib */
3974
        val = insn_get(s, OT_BYTE);
3975
        gen_op_movl_T0_im(val);
3976
        gen_op_mov_reg_T0[OT_BYTE][(b & 7) | REX_B(s)]();
3977
        break;
3978
    case 0xb8 ... 0xbf: /* mov R, Iv */
3979
#ifdef TARGET_X86_64
3980
        if (dflag == 2) {
3981
            uint64_t tmp;
3982
            /* 64 bit case */
3983
            tmp = ldq_code(s->pc);
3984
            s->pc += 8;
3985
            reg = (b & 7) | REX_B(s);
3986
            gen_movtl_T0_im(tmp);
3987
            gen_op_mov_reg_T0[OT_QUAD][reg]();
3988
        } else 
3989
#endif
3990
        {
3991
            ot = dflag ? OT_LONG : OT_WORD;
3992
            val = insn_get(s, ot);
3993
            reg = (b & 7) | REX_B(s);
3994
            gen_op_movl_T0_im(val);
3995
            gen_op_mov_reg_T0[ot][reg]();
3996
        }
3997
        break;
3998

    
3999
    case 0x91 ... 0x97: /* xchg R, EAX */
4000
        ot = dflag + OT_WORD;
4001
        reg = (b & 7) | REX_B(s);
4002
        rm = R_EAX;
4003
        goto do_xchg_reg;
4004
    case 0x86:
4005
    case 0x87: /* xchg Ev, Gv */
4006
        if ((b & 1) == 0)
4007
            ot = OT_BYTE;
4008
        else
4009
            ot = dflag + OT_WORD;
4010
        modrm = ldub_code(s->pc++);
4011
        reg = ((modrm >> 3) & 7) | rex_r;
4012
        mod = (modrm >> 6) & 3;
4013
        if (mod == 3) {
4014
            rm = (modrm & 7) | REX_B(s);
4015
        do_xchg_reg:
4016
            gen_op_mov_TN_reg[ot][0][reg]();
4017
            gen_op_mov_TN_reg[ot][1][rm]();
4018
            gen_op_mov_reg_T0[ot][rm]();
4019
            gen_op_mov_reg_T1[ot][reg]();
4020
        } else {
4021
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4022
            gen_op_mov_TN_reg[ot][0][reg]();
4023
            /* for xchg, lock is implicit */
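            /* if an explicit LOCK prefix is present, the lock/unlock pair is
               assumed to have been emitted by the generic prefix handling,
               so it is only added here when the prefix is absent. */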
4024
            if (!(prefixes & PREFIX_LOCK))
4025
                gen_op_lock();
4026
            gen_op_ld_T1_A0[ot + s->mem_index]();
4027
            gen_op_st_T0_A0[ot + s->mem_index]();
4028
            if (!(prefixes & PREFIX_LOCK))
4029
                gen_op_unlock();
4030
            gen_op_mov_reg_T1[ot][reg]();
4031
        }
4032
        break;
4033
    case 0xc4: /* les Gv */
4034
        if (CODE64(s))
4035
            goto illegal_op;
4036
        op = R_ES;
4037
        goto do_lxx;
4038
    case 0xc5: /* lds Gv */
4039
        if (CODE64(s))
4040
            goto illegal_op;
4041
        op = R_DS;
4042
        goto do_lxx;
4043
    case 0x1b2: /* lss Gv */
4044
        op = R_SS;
4045
        goto do_lxx;
4046
    case 0x1b4: /* lfs Gv */
4047
        op = R_FS;
4048
        goto do_lxx;
4049
    case 0x1b5: /* lgs Gv */
4050
        op = R_GS;
4051
    do_lxx:
4052
        ot = dflag ? OT_LONG : OT_WORD;
4053
        modrm = ldub_code(s->pc++);
4054
        reg = ((modrm >> 3) & 7) | rex_r;
4055
        mod = (modrm >> 6) & 3;
4056
        if (mod == 3)
4057
            goto illegal_op;
4058
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4059
        gen_op_ld_T1_A0[ot + s->mem_index]();
4060
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4061
        /* load the segment first to handle exceptions properly */
4062
        gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
4063
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4064
        /* then put the data */
4065
        gen_op_mov_reg_T1[ot][reg]();
4066
        if (s->is_jmp) {
4067
            gen_jmp_im(s->pc - s->cs_base);
4068
            gen_eob(s);
4069
        }
4070
        break;
4071
        
4072
        /************************/
4073
        /* shifts */
4074
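    /* GRP2 shifts/rotates: 'shift' encodes where the count comes from
       (2 = immediate byte, 1 = constant 1, 0 = the CL register). */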
    case 0xc0:
4075
    case 0xc1:
4076
        /* shift Ev,Ib */
4077
        shift = 2;
4078
    grp2:
4079
        {
4080
            if ((b & 1) == 0)
4081
                ot = OT_BYTE;
4082
            else
4083
                ot = dflag + OT_WORD;
4084
            
4085
            modrm = ldub_code(s->pc++);
4086
            mod = (modrm >> 6) & 3;
4087
            op = (modrm >> 3) & 7;
4088
            
4089
            if (mod != 3) {
4090
                if (shift == 2) {
4091
                    s->rip_offset = 1;
4092
                }
4093
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4094
                opreg = OR_TMP0;
4095
            } else {
4096
                opreg = (modrm & 7) | REX_B(s);
4097
            }
4098

    
4099
            /* emit the shift: by CL when shift == 0, otherwise by an
               immediate/constant count */
4100
            if (shift == 0) {
4101
                gen_shift(s, op, ot, opreg, OR_ECX);
4102
            } else {
4103
                if (shift == 2) {
4104
                    shift = ldub_code(s->pc++);
4105
                }
4106
                gen_shifti(s, op, ot, opreg, shift);
4107
            }
4108
        }
4109
        break;
4110
    case 0xd0:
4111
    case 0xd1:
4112
        /* shift Ev,1 */
4113
        shift = 1;
4114
        goto grp2;
4115
    case 0xd2:
4116
    case 0xd3:
4117
        /* shift Ev,cl */
4118
        shift = 0;
4119
        goto grp2;
4120

    
4121
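    /* SHLD/SHRD double width shifts: 'op' selects the direction and
       'shift' selects an immediate count versus CL; an immediate count of
       zero after masking leaves the flags untouched. */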
    case 0x1a4: /* shld imm */
4122
        op = 0;
4123
        shift = 1;
4124
        goto do_shiftd;
4125
    case 0x1a5: /* shld cl */
4126
        op = 0;
4127
        shift = 0;
4128
        goto do_shiftd;
4129
    case 0x1ac: /* shrd imm */
4130
        op = 1;
4131
        shift = 1;
4132
        goto do_shiftd;
4133
    case 0x1ad: /* shrd cl */
4134
        op = 1;
4135
        shift = 0;
4136
    do_shiftd:
4137
        ot = dflag + OT_WORD;
4138
        modrm = ldub_code(s->pc++);
4139
        mod = (modrm >> 6) & 3;
4140
        rm = (modrm & 7) | REX_B(s);
4141
        reg = ((modrm >> 3) & 7) | rex_r;
4142
        
4143
        if (mod != 3) {
4144
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4145
            gen_op_ld_T0_A0[ot + s->mem_index]();
4146
        } else {
4147
            gen_op_mov_TN_reg[ot][0][rm]();
4148
        }
4149
        gen_op_mov_TN_reg[ot][1][reg]();
4150
        
4151
        if (shift) {
4152
            val = ldub_code(s->pc++);
4153
            if (ot == OT_QUAD)
4154
                val &= 0x3f;
4155
            else
4156
                val &= 0x1f;
4157
            if (val) {
4158
                if (mod == 3)
4159
                    gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4160
                else
4161
                    gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4162
                if (op == 0 && ot != OT_WORD)
4163
                    s->cc_op = CC_OP_SHLB + ot;
4164
                else
4165
                    s->cc_op = CC_OP_SARB + ot;
4166
            }
4167
        } else {
4168
            if (s->cc_op != CC_OP_DYNAMIC)
4169
                gen_op_set_cc_op(s->cc_op);
4170
            if (mod == 3)
4171
                gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4172
            else
4173
                gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4174
            s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4175
        }
4176
        if (mod == 3) {
4177
            gen_op_mov_reg_T0[ot][rm]();
4178
        }
4179
        break;
4180

    
4181
        /************************/
4182
        /* floats */
4183
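    /* x87 escape opcodes: the low 3 bits of the opcode and the ModRM reg
       field are combined into a single 6 bit 'op' index; mod != 3 selects
       the memory forms, mod == 3 the register and control forms. */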
    case 0xd8 ... 0xdf: 
4184
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4185
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4186
            /* XXX: what should be done on an illegal op? */
4187
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4188
            break;
4189
        }
4190
        modrm = ldub_code(s->pc++);
4191
        mod = (modrm >> 6) & 3;
4192
        rm = modrm & 7;
4193
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4194
        if (mod != 3) {
4195
            /* memory op */
4196
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4197
            switch(op) {
4198
            case 0x00 ... 0x07: /* fxxxs */
4199
            case 0x10 ... 0x17: /* fixxxl */
4200
            case 0x20 ... 0x27: /* fxxxl */
4201
            case 0x30 ... 0x37: /* fixxx */
4202
                {
4203
                    int op1;
4204
                    op1 = op & 7;
4205

    
4206
                    switch(op >> 4) {
4207
                    case 0:
4208
                        gen_op_flds_FT0_A0();
4209
                        break;
4210
                    case 1:
4211
                        gen_op_fildl_FT0_A0();
4212
                        break;
4213
                    case 2:
4214
                        gen_op_fldl_FT0_A0();
4215
                        break;
4216
                    case 3:
4217
                    default:
4218
                        gen_op_fild_FT0_A0();
4219
                        break;
4220
                    }
4221
                    
4222
                    gen_op_fp_arith_ST0_FT0[op1]();
4223
                    if (op1 == 3) {
4224
                        /* fcomp needs pop */
4225
                        gen_op_fpop();
4226
                    }
4227
                }
4228
                break;
4229
            case 0x08: /* flds */
4230
            case 0x0a: /* fsts */
4231
            case 0x0b: /* fstps */
4232
            case 0x18: /* fildl */
4233
            case 0x1a: /* fistl */
4234
            case 0x1b: /* fistpl */
4235
            case 0x28: /* fldl */
4236
            case 0x2a: /* fstl */
4237
            case 0x2b: /* fstpl */
4238
            case 0x38: /* filds */
4239
            case 0x3a: /* fists */
4240
            case 0x3b: /* fistps */
4241
                
4242
                switch(op & 7) {
4243
                case 0:
4244
                    switch(op >> 4) {
4245
                    case 0:
4246
                        gen_op_flds_ST0_A0();
4247
                        break;
4248
                    case 1:
4249
                        gen_op_fildl_ST0_A0();
4250
                        break;
4251
                    case 2:
4252
                        gen_op_fldl_ST0_A0();
4253
                        break;
4254
                    case 3:
4255
                    default:
4256
                        gen_op_fild_ST0_A0();
4257
                        break;
4258
                    }
4259
                    break;
4260
                default:
4261
                    switch(op >> 4) {
4262
                    case 0:
4263
                        gen_op_fsts_ST0_A0();
4264
                        break;
4265
                    case 1:
4266
                        gen_op_fistl_ST0_A0();
4267
                        break;
4268
                    case 2:
4269
                        gen_op_fstl_ST0_A0();
4270
                        break;
4271
                    case 3:
4272
                    default:
4273
                        gen_op_fist_ST0_A0();
4274
                        break;
4275
                    }
4276
                    if ((op & 7) == 3)
4277
                        gen_op_fpop();
4278
                    break;
4279
                }
4280
                break;
4281
            case 0x0c: /* fldenv mem */
4282
                gen_op_fldenv_A0(s->dflag);
4283
                break;
4284
            case 0x0d: /* fldcw mem */
4285
                gen_op_fldcw_A0();
4286
                break;
4287
            case 0x0e: /* fnstenv mem */
4288
                gen_op_fnstenv_A0(s->dflag);
4289
                break;
4290
            case 0x0f: /* fnstcw mem */
4291
                gen_op_fnstcw_A0();
4292
                break;
4293
            case 0x1d: /* fldt mem */
4294
                gen_op_fldt_ST0_A0();
4295
                break;
4296
            case 0x1f: /* fstpt mem */
4297
                gen_op_fstt_ST0_A0();
4298
                gen_op_fpop();
4299
                break;
4300
            case 0x2c: /* frstor mem */
4301
                gen_op_frstor_A0(s->dflag);
4302
                break;
4303
            case 0x2e: /* fnsave mem */
4304
                gen_op_fnsave_A0(s->dflag);
4305
                break;
4306
            case 0x2f: /* fnstsw mem */
4307
                gen_op_fnstsw_A0();
4308
                break;
4309
            case 0x3c: /* fbld */
4310
                gen_op_fbld_ST0_A0();
4311
                break;
4312
            case 0x3e: /* fbstp */
4313
                gen_op_fbst_ST0_A0();
4314
                gen_op_fpop();
4315
                break;
4316
            case 0x3d: /* fildll */
4317
                gen_op_fildll_ST0_A0();
4318
                break;
4319
            case 0x3f: /* fistpll */
4320
                gen_op_fistll_ST0_A0();
4321
                gen_op_fpop();
4322
                break;
4323
            default:
4324
                goto illegal_op;
4325
            }
4326
        } else {
4327
            /* register float ops */
4328
            opreg = rm;
4329

    
4330
            switch(op) {
4331
            case 0x08: /* fld sti */
4332
                gen_op_fpush();
4333
                gen_op_fmov_ST0_STN((opreg + 1) & 7);
4334
                break;
4335
            case 0x09: /* fxchg sti */
4336
            case 0x29: /* fxchg4 sti, undocumented op */
4337
            case 0x39: /* fxchg7 sti, undocumented op */
4338
                gen_op_fxchg_ST0_STN(opreg);
4339
                break;
4340
            case 0x0a: /* grp d9/2 */
4341
                switch(rm) {
4342
                case 0: /* fnop */
4343
                    /* check exceptions (FreeBSD FPU probe) */
4344
                    if (s->cc_op != CC_OP_DYNAMIC)
4345
                        gen_op_set_cc_op(s->cc_op);
4346
                    gen_jmp_im(pc_start - s->cs_base);
4347
                    gen_op_fwait();
4348
                    break;
4349
                default:
4350
                    goto illegal_op;
4351
                }
4352
                break;
4353
            case 0x0c: /* grp d9/4 */
4354
                switch(rm) {
4355
                case 0: /* fchs */
4356
                    gen_op_fchs_ST0();
4357
                    break;
4358
                case 1: /* fabs */
4359
                    gen_op_fabs_ST0();
4360
                    break;
4361
                case 4: /* ftst */
4362
                    gen_op_fldz_FT0();
4363
                    gen_op_fcom_ST0_FT0();
4364
                    break;
4365
                case 5: /* fxam */
4366
                    gen_op_fxam_ST0();
4367
                    break;
4368
                default:
4369
                    goto illegal_op;
4370
                }
4371
                break;
4372
            case 0x0d: /* grp d9/5 */
4373
                {
4374
                    switch(rm) {
4375
                    case 0:
4376
                        gen_op_fpush();
4377
                        gen_op_fld1_ST0();
4378
                        break;
4379
                    case 1:
4380
                        gen_op_fpush();
4381
                        gen_op_fldl2t_ST0();
4382
                        break;
4383
                    case 2:
4384
                        gen_op_fpush();
4385
                        gen_op_fldl2e_ST0();
4386
                        break;
4387
                    case 3:
4388
                        gen_op_fpush();
4389
                        gen_op_fldpi_ST0();
4390
                        break;
4391
                    case 4:
4392
                        gen_op_fpush();
4393
                        gen_op_fldlg2_ST0();
4394
                        break;
4395
                    case 5:
4396
                        gen_op_fpush();
4397
                        gen_op_fldln2_ST0();
4398
                        break;
4399
                    case 6:
4400
                        gen_op_fpush();
4401
                        gen_op_fldz_ST0();
4402
                        break;
4403
                    default:
4404
                        goto illegal_op;
4405
                    }
4406
                }
4407
                break;
4408
            case 0x0e: /* grp d9/6 */
4409
                switch(rm) {
4410
                case 0: /* f2xm1 */
4411
                    gen_op_f2xm1();
4412
                    break;
4413
                case 1: /* fyl2x */
4414
                    gen_op_fyl2x();
4415
                    break;
4416
                case 2: /* fptan */
4417
                    gen_op_fptan();
4418
                    break;
4419
                case 3: /* fpatan */
4420
                    gen_op_fpatan();
4421
                    break;
4422
                case 4: /* fxtract */
4423
                    gen_op_fxtract();
4424
                    break;
4425
                case 5: /* fprem1 */
4426
                    gen_op_fprem1();
4427
                    break;
4428
                case 6: /* fdecstp */
4429
                    gen_op_fdecstp();
4430
                    break;
4431
                default:
4432
                case 7: /* fincstp */
4433
                    gen_op_fincstp();
4434
                    break;
4435
                }
4436
                break;
4437
            case 0x0f: /* grp d9/7 */
4438
                switch(rm) {
4439
                case 0: /* fprem */
4440
                    gen_op_fprem();
4441
                    break;
4442
                case 1: /* fyl2xp1 */
4443
                    gen_op_fyl2xp1();
4444
                    break;
4445
                case 2: /* fsqrt */
4446
                    gen_op_fsqrt();
4447
                    break;
4448
                case 3: /* fsincos */
4449
                    gen_op_fsincos();
4450
                    break;
4451
                case 5: /* fscale */
4452
                    gen_op_fscale();
4453
                    break;
4454
                case 4: /* frndint */
4455
                    gen_op_frndint();
4456
                    break;
4457
                case 6: /* fsin */
4458
                    gen_op_fsin();
4459
                    break;
4460
                default:
4461
                case 7: /* fcos */
4462
                    gen_op_fcos();
4463
                    break;
4464
                }
4465
                break;
4466
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4467
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4468
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4469
                {
4470
                    int op1;
4471
                    
4472
                    op1 = op & 7;
4473
                    if (op >= 0x20) {
4474
                        gen_op_fp_arith_STN_ST0[op1](opreg);
4475
                        if (op >= 0x30)
4476
                            gen_op_fpop();
4477
                    } else {
4478
                        gen_op_fmov_FT0_STN(opreg);
4479
                        gen_op_fp_arith_ST0_FT0[op1]();
4480
                    }
4481
                }
4482
                break;
4483
            case 0x02: /* fcom */
4484
            case 0x22: /* fcom2, undocumented op */
4485
                gen_op_fmov_FT0_STN(opreg);
4486
                gen_op_fcom_ST0_FT0();
4487
                break;
4488
            case 0x03: /* fcomp */
4489
            case 0x23: /* fcomp3, undocumented op */
4490
            case 0x32: /* fcomp5, undocumented op */
4491
                gen_op_fmov_FT0_STN(opreg);
4492
                gen_op_fcom_ST0_FT0();
4493
                gen_op_fpop();
4494
                break;
4495
            case 0x15: /* da/5 */
4496
                switch(rm) {
4497
                case 1: /* fucompp */
4498
                    gen_op_fmov_FT0_STN(1);
4499
                    gen_op_fucom_ST0_FT0();
4500
                    gen_op_fpop();
4501
                    gen_op_fpop();
4502
                    break;
4503
                default:
4504
                    goto illegal_op;
4505
                }
4506
                break;
4507
            case 0x1c:
4508
                switch(rm) {
4509
                case 0: /* feni (287 only, just do nop here) */
4510
                    break;
4511
                case 1: /* fdisi (287 only, just do nop here) */
4512
                    break;
4513
                case 2: /* fclex */
4514
                    gen_op_fclex();
4515
                    break;
4516
                case 3: /* fninit */
4517
                    gen_op_fninit();
4518
                    break;
4519
                case 4: /* fsetpm (287 only, just do nop here) */
4520
                    break;
4521
                default:
4522
                    goto illegal_op;
4523
                }
4524
                break;
4525
            case 0x1d: /* fucomi */
4526
                if (s->cc_op != CC_OP_DYNAMIC)
4527
                    gen_op_set_cc_op(s->cc_op);
4528
                gen_op_fmov_FT0_STN(opreg);
4529
                gen_op_fucomi_ST0_FT0();
4530
                s->cc_op = CC_OP_EFLAGS;
4531
                break;
4532
            case 0x1e: /* fcomi */
4533
                if (s->cc_op != CC_OP_DYNAMIC)
4534
                    gen_op_set_cc_op(s->cc_op);
4535
                gen_op_fmov_FT0_STN(opreg);
4536
                gen_op_fcomi_ST0_FT0();
4537
                s->cc_op = CC_OP_EFLAGS;
4538
                break;
4539
            case 0x28: /* ffree sti */
4540
                gen_op_ffree_STN(opreg);
4541
                break; 
4542
            case 0x2a: /* fst sti */
4543
                gen_op_fmov_STN_ST0(opreg);
4544
                break;
4545
            case 0x2b: /* fstp sti */
4546
            case 0x0b: /* fstp1 sti, undocumented op */
4547
            case 0x3a: /* fstp8 sti, undocumented op */
4548
            case 0x3b: /* fstp9 sti, undocumented op */
4549
                gen_op_fmov_STN_ST0(opreg);
4550
                gen_op_fpop();
4551
                break;
4552
            case 0x2c: /* fucom st(i) */
4553
                gen_op_fmov_FT0_STN(opreg);
4554
                gen_op_fucom_ST0_FT0();
4555
                break;
4556
            case 0x2d: /* fucomp st(i) */
4557
                gen_op_fmov_FT0_STN(opreg);
4558
                gen_op_fucom_ST0_FT0();
4559
                gen_op_fpop();
4560
                break;
4561
            case 0x33: /* de/3 */
4562
                switch(rm) {
4563
                case 1: /* fcompp */
4564
                    gen_op_fmov_FT0_STN(1);
4565
                    gen_op_fcom_ST0_FT0();
4566
                    gen_op_fpop();
4567
                    gen_op_fpop();
4568
                    break;
4569
                default:
4570
                    goto illegal_op;
4571
                }
4572
                break;
4573
            case 0x38: /* ffreep sti, undocumented op */
4574
                gen_op_ffree_STN(opreg);
4575
                gen_op_fpop();
4576
                break;
4577
            case 0x3c: /* df/4 */
4578
                switch(rm) {
4579
                case 0:
4580
                    gen_op_fnstsw_EAX();
4581
                    break;
4582
                default:
4583
                    goto illegal_op;
4584
                }
4585
                break;
4586
            case 0x3d: /* fucomip */
4587
                if (s->cc_op != CC_OP_DYNAMIC)
4588
                    gen_op_set_cc_op(s->cc_op);
4589
                gen_op_fmov_FT0_STN(opreg);
4590
                gen_op_fucomi_ST0_FT0();
4591
                gen_op_fpop();
4592
                s->cc_op = CC_OP_EFLAGS;
4593
                break;
4594
            case 0x3e: /* fcomip */
4595
                if (s->cc_op != CC_OP_DYNAMIC)
4596
                    gen_op_set_cc_op(s->cc_op);
4597
                gen_op_fmov_FT0_STN(opreg);
4598
                gen_op_fcomi_ST0_FT0();
4599
                gen_op_fpop();
4600
                s->cc_op = CC_OP_EFLAGS;
4601
                break;
4602
            case 0x10 ... 0x13: /* fcmovxx */
4603
            case 0x18 ... 0x1b:
4604
                {
4605
                    int op1;
4606
                    static const uint8_t fcmov_cc[8] = {
4607
                        (JCC_B << 1),
4608
                        (JCC_Z << 1),
4609
                        (JCC_BE << 1),
4610
                        (JCC_P << 1),
4611
                    };
4612
                    op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
4613
                    gen_setcc(s, op1);
4614
                    gen_op_fcmov_ST0_STN_T0(opreg);
4615
                }
4616
                break;
4617
            default:
4618
                goto illegal_op;
4619
            }
4620
        }
4621
#ifdef USE_CODE_COPY
4622
        s->tb->cflags |= CF_TB_FP_USED;
4623
#endif
4624
        break;
4625
        /************************/
4626
        /* string ops */
4627

    
4628
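    /* string ops: with a REP/REPZ/REPNZ prefix the gen_repz_* helpers emit
       the ECX test and loop control themselves; otherwise a single
       iteration is generated inline. */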
    case 0xa4: /* movsS */
4629
    case 0xa5:
4630
        if ((b & 1) == 0)
4631
            ot = OT_BYTE;
4632
        else
4633
            ot = dflag + OT_WORD;
4634

    
4635
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4636
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4637
        } else {
4638
            gen_movs(s, ot);
4639
        }
4640
        break;
4641
        
4642
    case 0xaa: /* stosS */
4643
    case 0xab:
4644
        if ((b & 1) == 0)
4645
            ot = OT_BYTE;
4646
        else
4647
            ot = dflag + OT_WORD;
4648

    
4649
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4650
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4651
        } else {
4652
            gen_stos(s, ot);
4653
        }
4654
        break;
4655
    case 0xac: /* lodsS */
4656
    case 0xad:
4657
        if ((b & 1) == 0)
4658
            ot = OT_BYTE;
4659
        else
4660
            ot = dflag + OT_WORD;
4661
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4662
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4663
        } else {
4664
            gen_lods(s, ot);
4665
        }
4666
        break;
4667
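    /* SCAS and CMPS are the only string ops where REPZ and REPNZ differ:
       the final argument distinguishes the two repeat conditions. */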
    case 0xae: /* scasS */
4668
    case 0xaf:
4669
        if ((b & 1) == 0)
4670
            ot = OT_BYTE;
4671
        else
4672
            ot = dflag + OT_WORD;
4673
        if (prefixes & PREFIX_REPNZ) {
4674
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4675
        } else if (prefixes & PREFIX_REPZ) {
4676
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4677
        } else {
4678
            gen_scas(s, ot);
4679
            s->cc_op = CC_OP_SUBB + ot;
4680
        }
4681
        break;
4682

    
4683
    case 0xa6: /* cmpsS */
4684
    case 0xa7:
4685
        if ((b & 1) == 0)
4686
            ot = OT_BYTE;
4687
        else
4688
            ot = dflag + OT_WORD;
4689
        if (prefixes & PREFIX_REPNZ) {
4690
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4691
        } else if (prefixes & PREFIX_REPZ) {
4692
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4693
        } else {
4694
            gen_cmps(s, ot);
4695
            s->cc_op = CC_OP_SUBB + ot;
4696
        }
4697
        break;
4698
    case 0x6c: /* insS */
4699
    case 0x6d:
4700
        if ((b & 1) == 0)
4701
            ot = OT_BYTE;
4702
        else
4703
            ot = dflag ? OT_LONG : OT_WORD;
4704
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
4705
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4706
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4707
        } else {
4708
            gen_ins(s, ot);
4709
        }
4710
        break;
4711
    case 0x6e: /* outsS */
4712
    case 0x6f:
4713
        if ((b & 1) == 0)
4714
            ot = OT_BYTE;
4715
        else
4716
            ot = dflag ? OT_LONG : OT_WORD;
4717
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
4718
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4719
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4720
        } else {
4721
            gen_outs(s, ot);
4722
        }
4723
        break;
4724

    
4725
        /************************/
4726
        /* port I/O */
4727
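    /* IN/OUT: the port number is either an immediate byte or DX;
       gen_check_io is there to verify that the access is permitted
       (IOPL / I/O permission bitmap) before it is emitted. */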
    case 0xe4:
4728
    case 0xe5:
4729
        if ((b & 1) == 0)
4730
            ot = OT_BYTE;
4731
        else
4732
            ot = dflag ? OT_LONG : OT_WORD;
4733
        val = ldub_code(s->pc++);
4734
        gen_op_movl_T0_im(val);
4735
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
4736
        gen_op_in[ot]();
4737
        gen_op_mov_reg_T1[ot][R_EAX]();
4738
        break;
4739
    case 0xe6:
4740
    case 0xe7:
4741
        if ((b & 1) == 0)
4742
            ot = OT_BYTE;
4743
        else
4744
            ot = dflag ? OT_LONG : OT_WORD;
4745
        val = ldub_code(s->pc++);
4746
        gen_op_movl_T0_im(val);
4747
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
4748
        gen_op_mov_TN_reg[ot][1][R_EAX]();
4749
        gen_op_out[ot]();
4750
        break;
4751
    case 0xec:
4752
    case 0xed:
4753
        if ((b & 1) == 0)
4754
            ot = OT_BYTE;
4755
        else
4756
            ot = dflag ? OT_LONG : OT_WORD;
4757
        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4758
        gen_op_andl_T0_ffff();
4759
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
4760
        gen_op_in[ot]();
4761
        gen_op_mov_reg_T1[ot][R_EAX]();
4762
        break;
4763
    case 0xee:
4764
    case 0xef:
4765
        if ((b & 1) == 0)
4766
            ot = OT_BYTE;
4767
        else
4768
            ot = dflag ? OT_LONG : OT_WORD;
4769
        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4770
        gen_op_andl_T0_ffff();
4771
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
4772
        gen_op_mov_TN_reg[ot][1][R_EAX]();
4773
        gen_op_out[ot]();
4774
        break;
4775

    
4776
        /************************/
4777
        /* control */
4778
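    /* RET imm16: pop the return address, then adjust the stack pointer by
       the return address size plus 'val' extra bytes (the callee's stack
       arguments). */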
    case 0xc2: /* ret im */
4779
        val = ldsw_code(s->pc);
4780
        s->pc += 2;
4781
        gen_pop_T0(s);
4782
        gen_stack_update(s, val + (2 << s->dflag));
4783
        if (s->dflag == 0)
4784
            gen_op_andl_T0_ffff();
4785
        gen_op_jmp_T0();
4786
        gen_eob(s);
4787
        break;
4788
    case 0xc3: /* ret */
4789
        gen_pop_T0(s);
4790
        gen_pop_update(s);
4791
        if (s->dflag == 0)
4792
            gen_op_andl_T0_ffff();
4793
        gen_op_jmp_T0();
4794
        gen_eob(s);
4795
        break;
4796
    case 0xca: /* lret im */
4797
        val = ldsw_code(s->pc);
4798
        s->pc += 2;
4799
    do_lret:
4800
        if (s->pe && !s->vm86) {
4801
            if (s->cc_op != CC_OP_DYNAMIC)
4802
                gen_op_set_cc_op(s->cc_op);
4803
            gen_jmp_im(pc_start - s->cs_base);
4804
            gen_op_lret_protected(s->dflag, val);
4805
        } else {
4806
            gen_stack_A0(s);
4807
            /* pop offset */
4808
            gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
4809
            if (s->dflag == 0)
4810
                gen_op_andl_T0_ffff();
4811
            /* NOTE: keeping EIP updated is not a problem in case of
4812
               exception */
4813
            gen_op_jmp_T0();
4814
            /* pop selector */
4815
            gen_op_addl_A0_im(2 << s->dflag);
4816
            gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
4817
            gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
4818
            /* add stack offset */
4819
            gen_stack_update(s, val + (4 << s->dflag));
4820
        }
4821
        gen_eob(s);
4822
        break;
4823
    case 0xcb: /* lret */
4824
        val = 0;
4825
        goto do_lret;
4826
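    /* IRET: real mode and vm86 with IOPL 3 use the simple real mode
       helper; vm86 with a lower IOPL faults with #GP; protected mode goes
       through the full protected mode return path. */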
    case 0xcf: /* iret */
4827
        if (!s->pe) {
4828
            /* real mode */
4829
            gen_op_iret_real(s->dflag);
4830
            s->cc_op = CC_OP_EFLAGS;
4831
        } else if (s->vm86) {
4832
            if (s->iopl != 3) {
4833
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4834
            } else {
4835
                gen_op_iret_real(s->dflag);
4836
                s->cc_op = CC_OP_EFLAGS;
4837
            }
4838
        } else {
4839
            if (s->cc_op != CC_OP_DYNAMIC)
4840
                gen_op_set_cc_op(s->cc_op);
4841
            gen_jmp_im(pc_start - s->cs_base);
4842
            gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
4843
            s->cc_op = CC_OP_EFLAGS;
4844
        }
4845
        gen_eob(s);
4846
        break;
4847
    case 0xe8: /* call im */
4848
        {
4849
            if (dflag)
4850
                tval = (int32_t)insn_get(s, OT_LONG);
4851
            else
4852
                tval = (int16_t)insn_get(s, OT_WORD);
4853
            next_eip = s->pc - s->cs_base;
4854
            tval += next_eip;
4855
            if (s->dflag == 0)
4856
                tval &= 0xffff;
4857
            gen_movtl_T0_im(next_eip);
4858
            gen_push_T0(s);
4859
            gen_jmp(s, tval);
4860
        }
4861
        break;
4862
    case 0x9a: /* lcall im */
4863
        {
4864
            unsigned int selector, offset;
4865
            
4866
            if (CODE64(s))
4867
                goto illegal_op;
4868
            ot = dflag ? OT_LONG : OT_WORD;
4869
            offset = insn_get(s, ot);
4870
            selector = insn_get(s, OT_WORD);
4871
            
4872
            gen_op_movl_T0_im(selector);
4873
            gen_op_movl_T1_imu(offset);
4874
        }
4875
        goto do_lcall;
4876
    case 0xe9: /* jmp */
4877
        if (dflag)
4878
            tval = (int32_t)insn_get(s, OT_LONG);
4879
        else
4880
            tval = (int16_t)insn_get(s, OT_WORD);
4881
        tval += s->pc - s->cs_base;
4882
        if (s->dflag == 0)
4883
            tval &= 0xffff;
4884
        gen_jmp(s, tval);
4885
        break;
4886
    case 0xea: /* ljmp im */
4887
        {
4888
            unsigned int selector, offset;
4889

    
4890
            if (CODE64(s))
4891
                goto illegal_op;
4892
            ot = dflag ? OT_LONG : OT_WORD;
4893
            offset = insn_get(s, ot);
4894
            selector = insn_get(s, OT_WORD);
4895
            
4896
            gen_op_movl_T0_im(selector);
4897
            gen_op_movl_T1_imu(offset);
4898
        }
4899
        goto do_ljmp;
4900
    case 0xeb: /* jmp Jb */
4901
        tval = (int8_t)insn_get(s, OT_BYTE);
4902
        tval += s->pc - s->cs_base;
4903
        if (s->dflag == 0)
4904
            tval &= 0xffff;
4905
        gen_jmp(s, tval);
4906
        break;
4907
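    /* conditional jumps: the sign-extended displacement is added to the
       address of the next instruction; with a 16 bit operand size the
       target is truncated to 16 bits. */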
    case 0x70 ... 0x7f: /* jcc Jb */
4908
        tval = (int8_t)insn_get(s, OT_BYTE);
4909
        goto do_jcc;
4910
    case 0x180 ... 0x18f: /* jcc Jv */
4911
        if (dflag) {
4912
            tval = (int32_t)insn_get(s, OT_LONG);
4913
        } else {
4914
            tval = (int16_t)insn_get(s, OT_WORD); 
4915
        }
4916
    do_jcc:
4917
        next_eip = s->pc - s->cs_base;
4918
        tval += next_eip;
4919
        if (s->dflag == 0)
4920
            tval &= 0xffff;
4921
        gen_jcc(s, b, tval, next_eip);
4922
        break;
4923

    
4924
    case 0x190 ... 0x19f: /* setcc Gv */
4925
        modrm = ldub_code(s->pc++);
4926
        gen_setcc(s, b);
4927
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
4928
        break;
4929
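    /* CMOV: gen_setcc evaluates the condition into T0 and the conditional
       move op then copies T1 into the destination register only when the
       condition holds. */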
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
4930
        ot = dflag + OT_WORD;
4931
        modrm = ldub_code(s->pc++);
4932
        reg = ((modrm >> 3) & 7) | rex_r;
4933
        mod = (modrm >> 6) & 3;
4934
        gen_setcc(s, b);
4935
        if (mod != 3) {
4936
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4937
            gen_op_ld_T1_A0[ot + s->mem_index]();
4938
        } else {
4939
            rm = (modrm & 7) | REX_B(s);
4940
            gen_op_mov_TN_reg[ot][1][rm]();
4941
        }
4942
        gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
4943
        break;
4944
        
4945
        /************************/
4946
        /* flags */
4947
    case 0x9c: /* pushf */
4948
        if (s->vm86 && s->iopl != 3) {
4949
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4950
        } else {
4951
            if (s->cc_op != CC_OP_DYNAMIC)
4952
                gen_op_set_cc_op(s->cc_op);
4953
            gen_op_movl_T0_eflags();
4954
            gen_push_T0(s);
4955
        }
4956
        break;
4957
    case 0x9d: /* popf */
4958
        if (s->vm86 && s->iopl != 3) {
4959
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4960
        } else {
4961
            gen_pop_T0(s);
4962
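            /* which eflags bits may be written depends on privilege: CPL 0
               may change IOPL and IF, CPL <= IOPL may change IF only,
               otherwise neither (hence the three helper variants below) */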
            if (s->cpl == 0) {
4963
                if (s->dflag) {
4964
                    gen_op_movl_eflags_T0_cpl0();
4965
                } else {
4966
                    gen_op_movw_eflags_T0_cpl0();
4967
                }
4968
            } else {
4969
                if (s->cpl <= s->iopl) {
4970
                    if (s->dflag) {
4971
                        gen_op_movl_eflags_T0_io();
4972
                    } else {
4973
                        gen_op_movw_eflags_T0_io();
4974
                    }
4975
                } else {
4976
                    if (s->dflag) {
4977
                        gen_op_movl_eflags_T0();
4978
                    } else {
4979
                        gen_op_movw_eflags_T0();
4980
                    }
4981
                }
4982
            }
4983
            gen_pop_update(s);
4984
            s->cc_op = CC_OP_EFLAGS;
4985
            /* abort translation because TF flag may change */
4986
            gen_jmp_im(s->pc - s->cs_base);
4987
            gen_eob(s);
4988
        }
4989
        break;
4990
    case 0x9e: /* sahf */
4991
        if (CODE64(s))
4992
            goto illegal_op;
4993
        gen_op_mov_TN_reg[OT_BYTE][0][R_AH]();
4994
        if (s->cc_op != CC_OP_DYNAMIC)
4995
            gen_op_set_cc_op(s->cc_op);
4996
        gen_op_movb_eflags_T0();
4997
        s->cc_op = CC_OP_EFLAGS;
4998
        break;
4999
    case 0x9f: /* lahf */
5000
        if (CODE64(s))
5001
            goto illegal_op;
5002
        if (s->cc_op != CC_OP_DYNAMIC)
5003
            gen_op_set_cc_op(s->cc_op);
5004
        gen_op_movl_T0_eflags();
5005
        gen_op_mov_reg_T0[OT_BYTE][R_AH]();
5006
        break;
5007
    case 0xf5: /* cmc */
5008
        if (s->cc_op != CC_OP_DYNAMIC)
5009
            gen_op_set_cc_op(s->cc_op);
5010
        gen_op_cmc();
5011
        s->cc_op = CC_OP_EFLAGS;
5012
        break;
5013
    case 0xf8: /* clc */
5014
        if (s->cc_op != CC_OP_DYNAMIC)
5015
            gen_op_set_cc_op(s->cc_op);
5016
        gen_op_clc();
5017
        s->cc_op = CC_OP_EFLAGS;
5018
        break;
5019
    case 0xf9: /* stc */
5020
        if (s->cc_op != CC_OP_DYNAMIC)
5021
            gen_op_set_cc_op(s->cc_op);
5022
        gen_op_stc();
5023
        s->cc_op = CC_OP_EFLAGS;
5024
        break;
5025
    case 0xfc: /* cld */
5026
        gen_op_cld();
5027
        break;
5028
    case 0xfd: /* std */
5029
        gen_op_std();
5030
        break;
5031

    
5032
        /************************/
5033
        /* bit operations */
5034
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
5035
        ot = dflag + OT_WORD;
5036
        modrm = ldub_code(s->pc++);
5037
        op = ((modrm >> 3) & 7) | rex_r;
5038
        mod = (modrm >> 6) & 3;
5039
        rm = (modrm & 7) | REX_B(s);
5040
        if (mod != 3) {
5041
            s->rip_offset = 1;
5042
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5043
            gen_op_ld_T0_A0[ot + s->mem_index]();
5044
        } else {
5045
            gen_op_mov_TN_reg[ot][0][rm]();
5046
        }
5047
        /* load shift */
5048
        val = ldub_code(s->pc++);
5049
        gen_op_movl_T1_im(val);
5050
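        /* group 8: /4 = bt, /5 = bts, /6 = btr, /7 = btc; the /0../3
           encodings are undefined, so op is rebased to 0..3 below */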
        if (op < 4)
5051
            goto illegal_op;
5052
        op -= 4;
5053
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5054
        s->cc_op = CC_OP_SARB + ot;
5055
        if (op != 0) {
5056
            if (mod != 3)
5057
                gen_op_st_T0_A0[ot + s->mem_index]();
5058
            else
5059
                gen_op_mov_reg_T0[ot][rm]();
5060
            gen_op_update_bt_cc();
5061
        }
5062
        break;
5063
    case 0x1a3: /* bt Gv, Ev */
5064
        op = 0;
5065
        goto do_btx;
5066
    case 0x1ab: /* bts */
5067
        op = 1;
5068
        goto do_btx;
5069
    case 0x1b3: /* btr */
5070
        op = 2;
5071
        goto do_btx;
5072
    case 0x1bb: /* btc */
5073
        op = 3;
5074
    do_btx:
5075
        ot = dflag + OT_WORD;
5076
        modrm = ldub_code(s->pc++);
5077
        reg = ((modrm >> 3) & 7) | rex_r;
5078
        mod = (modrm >> 6) & 3;
5079
        rm = (modrm & 7) | REX_B(s);
5080
        gen_op_mov_TN_reg[OT_LONG][1][reg]();
5081
        if (mod != 3) {
5082
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5083
            /* special case: the bit offset can exceed the operand size, so
               add the corresponding displacement to A0 */
5084
            gen_op_add_bit_A0_T1[ot - OT_WORD]();
5085
            gen_op_ld_T0_A0[ot + s->mem_index]();
5086
        } else {
5087
            gen_op_mov_TN_reg[ot][0][rm]();
5088
        }
5089
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5090
        s->cc_op = CC_OP_SARB + ot;
5091
        if (op != 0) {
5092
            if (mod != 3)
5093
                gen_op_st_T0_A0[ot + s->mem_index]();
5094
            else
5095
                gen_op_mov_reg_T0[ot][rm]();
5096
            gen_op_update_bt_cc();
5097
        }
5098
        break;
5099
    case 0x1bc: /* bsf */
5100
    case 0x1bd: /* bsr */
5101
        ot = dflag + OT_WORD;
5102
        modrm = ldub_code(s->pc++);
5103
        reg = ((modrm >> 3) & 7) | rex_r;
5104
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5105
        /* NOTE: in order to handle the 0 case, we must load the
5106
           result. It could be optimized with a generated jump */
5107
        gen_op_mov_TN_reg[ot][1][reg]();
5108
        gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5109
        gen_op_mov_reg_T1[ot][reg]();
5110
        s->cc_op = CC_OP_LOGICB + ot;
5111
        break;
5112
        /************************/
5113
        /* bcd */
5114
    case 0x27: /* daa */
5115
        if (CODE64(s))
5116
            goto illegal_op;
5117
        if (s->cc_op != CC_OP_DYNAMIC)
5118
            gen_op_set_cc_op(s->cc_op);
5119
        gen_op_daa();
5120
        s->cc_op = CC_OP_EFLAGS;
5121
        break;
5122
    case 0x2f: /* das */
5123
        if (CODE64(s))
5124
            goto illegal_op;
5125
        if (s->cc_op != CC_OP_DYNAMIC)
5126
            gen_op_set_cc_op(s->cc_op);
5127
        gen_op_das();
5128
        s->cc_op = CC_OP_EFLAGS;
5129
        break;
5130
    case 0x37: /* aaa */
5131
        if (CODE64(s))
5132
            goto illegal_op;
5133
        if (s->cc_op != CC_OP_DYNAMIC)
5134
            gen_op_set_cc_op(s->cc_op);
5135
        gen_op_aaa();
5136
        s->cc_op = CC_OP_EFLAGS;
5137
        break;
5138
    case 0x3f: /* aas */
5139
        if (CODE64(s))
5140
            goto illegal_op;
5141
        if (s->cc_op != CC_OP_DYNAMIC)
5142
            gen_op_set_cc_op(s->cc_op);
5143
        gen_op_aas();
5144
        s->cc_op = CC_OP_EFLAGS;
5145
        break;
5146
    case 0xd4: /* aam */
5147
        if (CODE64(s))
5148
            goto illegal_op;
5149
        val = ldub_code(s->pc++);
5150
        gen_op_aam(val);
5151
        s->cc_op = CC_OP_LOGICB;
5152
        break;
5153
    case 0xd5: /* aad */
5154
        if (CODE64(s))
5155
            goto illegal_op;
5156
        val = ldub_code(s->pc++);
5157
        gen_op_aad(val);
5158
        s->cc_op = CC_OP_LOGICB;
5159
        break;
5160
        /************************/
5161
        /* misc */
5162
    case 0x90: /* nop */
5163
        /* XXX: xchg + rex handling */
5164
        /* XXX: correct lock test for all insns */
5165
        if (prefixes & PREFIX_LOCK)
5166
            goto illegal_op;
5167
        break;
5168
    case 0x9b: /* fwait */
5169
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) == 
5170
            (HF_MP_MASK | HF_TS_MASK)) {
5171
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5172
        } else {
5173
            if (s->cc_op != CC_OP_DYNAMIC)
5174
                gen_op_set_cc_op(s->cc_op);
5175
            gen_jmp_im(pc_start - s->cs_base);
5176
            gen_op_fwait();
5177
        }
5178
        break;
5179
    case 0xcc: /* int3 */
5180
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5181
        break;
5182
    case 0xcd: /* int N */
5183
        val = ldub_code(s->pc++);
5184
        if (s->vm86 && s->iopl != 3) {
5185
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base); 
5186
        } else {
5187
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5188
        }
5189
        break;
5190
    case 0xce: /* into */
5191
        if (CODE64(s))
5192
            goto illegal_op;
5193
        if (s->cc_op != CC_OP_DYNAMIC)
5194
            gen_op_set_cc_op(s->cc_op);
5195
        gen_jmp_im(pc_start - s->cs_base);
5196
        gen_op_into(s->pc - pc_start);
5197
        break;
5198
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
5199
#if 1
5200
        gen_debug(s, pc_start - s->cs_base);
5201
#else
5202
        /* start debug */
5203
        tb_flush(cpu_single_env);
5204
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5205
#endif
5206
        break;
5207
    case 0xfa: /* cli */
5208
        if (!s->vm86) {
5209
            if (s->cpl <= s->iopl) {
5210
                gen_op_cli();
5211
            } else {
5212
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5213
            }
5214
        } else {
5215
            if (s->iopl == 3) {
5216
                gen_op_cli();
5217
            } else {
5218
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5219
            }
5220
        }
5221
        break;
5222
    case 0xfb: /* sti */
5223
        if (!s->vm86) {
5224
            if (s->cpl <= s->iopl) {
5225
            gen_sti:
5226
                gen_op_sti();
5227
                /* interrupts are enabled only after the first insn following sti */
                /* if several consecutive instructions inhibit irqs, only
                   the _first_ one sets the inhibit flag */
5230
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5231
                    gen_op_set_inhibit_irq();
5232
                /* give a chance to handle pending irqs */
5233
                gen_jmp_im(s->pc - s->cs_base);
5234
                gen_eob(s);
5235
            } else {
5236
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5237
            }
5238
        } else {
5239
            if (s->iopl == 3) {
5240
                goto gen_sti;
5241
            } else {
5242
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5243
            }
5244
        }
5245
        break;
5246
    case 0x62: /* bound */
5247
        if (CODE64(s))
5248
            goto illegal_op;
5249
        ot = dflag ? OT_LONG : OT_WORD;
5250
        modrm = ldub_code(s->pc++);
5251
        reg = (modrm >> 3) & 7;
5252
        mod = (modrm >> 6) & 3;
5253
        if (mod == 3)
5254
            goto illegal_op;
5255
        gen_op_mov_TN_reg[ot][0][reg]();
5256
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5257
        gen_jmp_im(pc_start - s->cs_base);
5258
        if (ot == OT_WORD)
5259
            gen_op_boundw();
5260
        else
5261
            gen_op_boundl();
5262
        break;
5263
    case 0x1c8 ... 0x1cf: /* bswap reg */
5264
        reg = (b & 7) | REX_B(s);
5265
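        /* dflag == 2 means a 64 bit operand size (REX.W): swap all 8 bytes
           of the register, otherwise only the low 32 bits */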
#ifdef TARGET_X86_64
5266
        if (dflag == 2) {
5267
            gen_op_mov_TN_reg[OT_QUAD][0][reg]();
5268
            gen_op_bswapq_T0();
5269
            gen_op_mov_reg_T0[OT_QUAD][reg]();
5270
        } else 
5271
#endif
5272
        {
5273
            gen_op_mov_TN_reg[OT_LONG][0][reg]();
5274
            gen_op_bswapl_T0();
5275
            gen_op_mov_reg_T0[OT_LONG][reg]();
5276
        }
5277
        break;
5278
    case 0xd6: /* salc */
5279
        if (CODE64(s))
5280
            goto illegal_op;
5281
        if (s->cc_op != CC_OP_DYNAMIC)
5282
            gen_op_set_cc_op(s->cc_op);
5283
        gen_op_salc();
5284
        break;
5285
    case 0xe0: /* loopnz */
5286
    case 0xe1: /* loopz */
5287
        if (s->cc_op != CC_OP_DYNAMIC)
5288
            gen_op_set_cc_op(s->cc_op);
5289
        /* FALL THRU */
5290
    case 0xe2: /* loop */
5291
    case 0xe3: /* jecxz */
5292
        {
5293
            int l1, l2;
5294

    
5295
            tval = (int8_t)insn_get(s, OT_BYTE);
5296
            next_eip = s->pc - s->cs_base;
5297
            tval += next_eip;
5298
            if (s->dflag == 0)
5299
                tval &= 0xffff;
5300
            
5301
            l1 = gen_new_label();
5302
            l2 = gen_new_label();
5303
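            /* b & 3: 0 = loopnz, 1 = loopz, 2 = loop, 3 = jecxz.  l1 is the
               branch taken path (EIP = tval), l2 is the common exit reached
               after the not taken path has set EIP = next_eip. */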
            b &= 3;
5304
            if (b == 3) {
5305
                gen_op_jz_ecx[s->aflag](l1);
5306
            } else {
5307
                gen_op_dec_ECX[s->aflag]();
5308
                if (b <= 1)
5309
                    gen_op_mov_T0_cc();
5310
                gen_op_loop[s->aflag][b](l1);
5311
            }
5312

    
5313
            gen_jmp_im(next_eip);
5314
            gen_op_jmp_label(l2);
5315
            gen_set_label(l1);
5316
            gen_jmp_im(tval);
5317
            gen_set_label(l2);
5318
            gen_eob(s);
5319
        }
5320
        break;
5321
    case 0x130: /* wrmsr */
5322
    case 0x132: /* rdmsr */
5323
        if (s->cpl != 0) {
5324
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5325
        } else {
5326
            if (b & 2)
5327
                gen_op_rdmsr();
5328
            else
5329
                gen_op_wrmsr();
5330
        }
5331
        break;
5332
    case 0x131: /* rdtsc */
5333
        gen_op_rdtsc();
5334
        break;
5335
    case 0x134: /* sysenter */
5336
        if (CODE64(s))
5337
            goto illegal_op;
5338
        if (!s->pe) {
5339
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5340
        } else {
5341
            if (s->cc_op != CC_OP_DYNAMIC) {
5342
                gen_op_set_cc_op(s->cc_op);
5343
                s->cc_op = CC_OP_DYNAMIC;
5344
            }
5345
            gen_jmp_im(pc_start - s->cs_base);
5346
            gen_op_sysenter();
5347
            gen_eob(s);
5348
        }
5349
        break;
5350
    case 0x135: /* sysexit */
5351
        if (CODE64(s))
5352
            goto illegal_op;
5353
        if (!s->pe) {
5354
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5355
        } else {
5356
            if (s->cc_op != CC_OP_DYNAMIC) {
5357
                gen_op_set_cc_op(s->cc_op);
5358
                s->cc_op = CC_OP_DYNAMIC;
5359
            }
5360
            gen_jmp_im(pc_start - s->cs_base);
5361
            gen_op_sysexit();
5362
            gen_eob(s);
5363
        }
5364
        break;
5365
#ifdef TARGET_X86_64
5366
    case 0x105: /* syscall */
5367
        /* XXX: is it usable in real mode ? */
5368
        if (s->cc_op != CC_OP_DYNAMIC) {
5369
            gen_op_set_cc_op(s->cc_op);
5370
            s->cc_op = CC_OP_DYNAMIC;
5371
        }
5372
        gen_jmp_im(pc_start - s->cs_base);
5373
        gen_op_syscall(s->pc - pc_start);
5374
        gen_eob(s);
5375
        break;
5376
    case 0x107: /* sysret */
5377
        if (!s->pe) {
5378
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5379
        } else {
5380
            if (s->cc_op != CC_OP_DYNAMIC) {
5381
                gen_op_set_cc_op(s->cc_op);
5382
                s->cc_op = CC_OP_DYNAMIC;
5383
            }
5384
            gen_jmp_im(pc_start - s->cs_base);
5385
            gen_op_sysret(s->dflag);
5386
            /* condition codes are modified only in long mode */
5387
            if (s->lma)
5388
                s->cc_op = CC_OP_EFLAGS;
5389
            gen_eob(s);
5390
        }
5391
        break;
5392
#endif
5393
    case 0x1a2: /* cpuid */
5394
        gen_op_cpuid();
5395
        break;
5396
    case 0xf4: /* hlt */
5397
        if (s->cpl != 0) {
5398
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5399
        } else {
5400
            if (s->cc_op != CC_OP_DYNAMIC)
5401
                gen_op_set_cc_op(s->cc_op);
5402
            gen_jmp_im(s->pc - s->cs_base);
5403
            gen_op_hlt();
5404
            s->is_jmp = 3;
5405
        }
5406
        break;
5407
    case 0x100:
5408
        modrm = ldub_code(s->pc++);
5409
        mod = (modrm >> 6) & 3;
5410
        op = (modrm >> 3) & 7;
5411
        switch(op) {
5412
        case 0: /* sldt */
5413
            if (!s->pe || s->vm86)
5414
                goto illegal_op;
5415
            gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5416
            ot = OT_WORD;
5417
            if (mod == 3)
5418
                ot += s->dflag;
5419
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5420
            break;
5421
        case 2: /* lldt */
5422
            if (!s->pe || s->vm86)
5423
                goto illegal_op;
5424
            if (s->cpl != 0) {
5425
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5426
            } else {
5427
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5428
                gen_jmp_im(pc_start - s->cs_base);
5429
                gen_op_lldt_T0();
5430
            }
5431
            break;
5432
        case 1: /* str */
5433
            if (!s->pe || s->vm86)
5434
                goto illegal_op;
5435
            gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5436
            ot = OT_WORD;
5437
            if (mod == 3)
5438
                ot += s->dflag;
5439
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5440
            break;
5441
        case 3: /* ltr */
5442
            if (!s->pe || s->vm86)
5443
                goto illegal_op;
5444
            if (s->cpl != 0) {
5445
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5446
            } else {
5447
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5448
                gen_jmp_im(pc_start - s->cs_base);
5449
                gen_op_ltr_T0();
5450
            }
5451
            break;
5452
        case 4: /* verr */
5453
        case 5: /* verw */
5454
            if (!s->pe || s->vm86)
5455
                goto illegal_op;
5456
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5457
            if (s->cc_op != CC_OP_DYNAMIC)
5458
                gen_op_set_cc_op(s->cc_op);
5459
            if (op == 4)
5460
                gen_op_verr();
5461
            else
5462
                gen_op_verw();
5463
            s->cc_op = CC_OP_EFLAGS;
5464
            break;
5465
        default:
5466
            goto illegal_op;
5467
        }
5468
        break;
5469
    case 0x101:
5470
        modrm = ldub_code(s->pc++);
5471
        mod = (modrm >> 6) & 3;
5472
        op = (modrm >> 3) & 7;
5473
        switch(op) {
5474
        case 0: /* sgdt */
5475
        case 1: /* sidt */
5476
            if (mod == 3)
5477
                goto illegal_op;
5478
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5479
            if (op == 0)
5480
                gen_op_movl_T0_env(offsetof(CPUX86State,gdt.limit));
5481
            else
5482
                gen_op_movl_T0_env(offsetof(CPUX86State,idt.limit));
5483
            gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5484
            gen_add_A0_im(s, 2);
5485
            if (op == 0)
5486
                gen_op_movtl_T0_env(offsetof(CPUX86State,gdt.base));
5487
            else
5488
                gen_op_movtl_T0_env(offsetof(CPUX86State,idt.base));
5489
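            /* with a 16 bit operand size only 24 bits of the base are
               stored */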
            if (!s->dflag)
5490
                gen_op_andl_T0_im(0xffffff);
5491
            gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5492
            break;
5493
        case 2: /* lgdt */
5494
        case 3: /* lidt */
5495
            if (mod == 3)
5496
                goto illegal_op;
5497
            if (s->cpl != 0) {
5498
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5499
            } else {
5500
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5501
                gen_op_ld_T1_A0[OT_WORD + s->mem_index]();
5502
                gen_add_A0_im(s, 2);
5503
                gen_op_ld_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5504
                if (!s->dflag)
5505
                    gen_op_andl_T0_im(0xffffff);
5506
                if (op == 2) {
5507
                    gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
5508
                    gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
5509
                } else {
5510
                    gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
5511
                    gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
5512
                }
5513
            }
5514
            break;
5515
        case 4: /* smsw */
5516
            gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
5517
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
5518
            break;
5519
        case 6: /* lmsw */
5520
            if (s->cpl != 0) {
5521
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5522
            } else {
5523
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5524
                gen_op_lmsw_T0();
5525
                gen_jmp_im(s->pc - s->cs_base);
5526
                gen_eob(s);
5527
            }
5528
            break;
5529
        case 7: /* invlpg */
5530
            if (s->cpl != 0) {
5531
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5532
            } else {
5533
                if (mod == 3) {
5534
#ifdef TARGET_X86_64
5535
                    if (CODE64(s) && (modrm & 7) == 0) {
5536
                        /* swapgs */
5537
                        gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
5538
                        gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
5539
                        gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
5540
                        gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
5541
                    } else 
5542
#endif
5543
                    {
5544
                        goto illegal_op;
5545
                    }
5546
                } else {
5547
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5548
                    gen_op_invlpg_A0();
5549
                    gen_jmp_im(s->pc - s->cs_base);
5550
                    gen_eob(s);
5551
                }
5552
            }
5553
            break;
5554
        default:
5555
            goto illegal_op;
5556
        }
5557
        break;
5558
    case 0x108: /* invd */
5559
    case 0x109: /* wbinvd */
5560
        if (s->cpl != 0) {
5561
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5562
        } else {
5563
            /* nothing to do */
5564
        }
5565
        break;
5566
    case 0x63: /* arpl, or movsxd in x86_64 mode */
5567
#ifdef TARGET_X86_64
5568
        if (CODE64(s)) {
5569
            int d_ot;
5570
            /* d_ot is the size of the destination */
5571
            d_ot = dflag + OT_WORD;
5572

    
5573
            modrm = ldub_code(s->pc++);
5574
            reg = ((modrm >> 3) & 7) | rex_r;
5575
            mod = (modrm >> 6) & 3;
5576
            rm = (modrm & 7) | REX_B(s);
5577
            
5578
            if (mod == 3) {
5579
                gen_op_mov_TN_reg[OT_LONG][0][rm]();
5580
                /* sign extend */
5581
                if (d_ot == OT_QUAD)
5582
                    gen_op_movslq_T0_T0();
5583
                gen_op_mov_reg_T0[d_ot][reg]();
5584
            } else {
5585
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5586
                if (d_ot == OT_QUAD) {
5587
                    gen_op_lds_T0_A0[OT_LONG + s->mem_index]();
5588
                } else {
5589
                    gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
5590
                }
5591
                gen_op_mov_reg_T0[d_ot][reg]();
5592
            }
5593
        } else 
5594
#endif
5595
        {
5596
            if (!s->pe || s->vm86)
5597
                goto illegal_op;
5598
            ot = dflag ? OT_LONG : OT_WORD;
5599
            modrm = ldub_code(s->pc++);
5600
            reg = (modrm >> 3) & 7;
5601
            mod = (modrm >> 6) & 3;
5602
            rm = modrm & 7;
5603
            if (mod != 3) {
5604
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5605
                gen_op_ld_T0_A0[ot + s->mem_index]();
5606
            } else {
5607
                gen_op_mov_TN_reg[ot][0][rm]();
5608
            }
5609
            if (s->cc_op != CC_OP_DYNAMIC)
5610
                gen_op_set_cc_op(s->cc_op);
5611
            gen_op_arpl();
5612
            s->cc_op = CC_OP_EFLAGS;
5613
            if (mod != 3) {
5614
                gen_op_st_T0_A0[ot + s->mem_index]();
5615
            } else {
5616
                gen_op_mov_reg_T0[ot][rm]();
5617
            }
5618
            gen_op_arpl_update();
5619
        }
5620
        break;
5621
    case 0x102: /* lar */
5622
    case 0x103: /* lsl */
5623
        if (!s->pe || s->vm86)
5624
            goto illegal_op;
5625
        ot = dflag ? OT_LONG : OT_WORD;
5626
        modrm = ldub_code(s->pc++);
5627
        reg = ((modrm >> 3) & 7) | rex_r;
5628
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5629
        gen_op_mov_TN_reg[ot][1][reg]();
5630
        if (s->cc_op != CC_OP_DYNAMIC)
5631
            gen_op_set_cc_op(s->cc_op);
5632
        if (b == 0x102)
5633
            gen_op_lar();
5634
        else
5635
            gen_op_lsl();
5636
        s->cc_op = CC_OP_EFLAGS;
5637
        gen_op_mov_reg_T1[ot][reg]();
5638
        break;
5639
    case 0x118:
5640
        modrm = ldub_code(s->pc++);
5641
        mod = (modrm >> 6) & 3;
5642
        op = (modrm >> 3) & 7;
5643
        switch(op) {
5644
        case 0: /* prefetchnta */
5645
        case 1: /* prefetcht0 */
        case 2: /* prefetcht1 */
        case 3: /* prefetcht2 */
5648
            if (mod == 3)
5649
                goto illegal_op;
5650
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5651
            /* nothing more to do */
5652
            break;
5653
        default:
5654
            goto illegal_op;
5655
        }
5656
        break;
5657
    case 0x120: /* mov reg, crN */
5658
    case 0x122: /* mov crN, reg */
5659
        if (s->cpl != 0) {
5660
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5661
        } else {
5662
            modrm = ldub_code(s->pc++);
5663
            if ((modrm & 0xc0) != 0xc0)
5664
                goto illegal_op;
5665
            rm = (modrm & 7) | REX_B(s);
5666
            reg = ((modrm >> 3) & 7) | rex_r;
5667
            if (CODE64(s))
5668
                ot = OT_QUAD;
5669
            else
5670
                ot = OT_LONG;
5671
            switch(reg) {
5672
            case 0:
5673
            case 2:
5674
            case 3:
5675
            case 4:
5676
            case 8:
5677
                if (b & 2) {
5678
                    gen_op_mov_TN_reg[ot][0][rm]();
5679
                    gen_op_movl_crN_T0(reg);
5680
                    gen_jmp_im(s->pc - s->cs_base);
5681
                    gen_eob(s);
5682
                } else {
5683
#if !defined(CONFIG_USER_ONLY) 
5684
                    if (reg == 8)
5685
                        gen_op_movtl_T0_cr8();
5686
                    else
5687
#endif
5688
                        gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
5689
                    gen_op_mov_reg_T0[ot][rm]();
5690
                }
5691
                break;
5692
            default:
5693
                goto illegal_op;
5694
            }
5695
        }
5696
        break;
5697
    case 0x121: /* mov reg, drN */
5698
    case 0x123: /* mov drN, reg */
5699
        if (s->cpl != 0) {
5700
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5701
        } else {
5702
            modrm = ldub_code(s->pc++);
5703
            if ((modrm & 0xc0) != 0xc0)
5704
                goto illegal_op;
5705
            rm = (modrm & 7) | REX_B(s);
5706
            reg = ((modrm >> 3) & 7) | rex_r;
5707
            if (CODE64(s))
5708
                ot = OT_QUAD;
5709
            else
5710
                ot = OT_LONG;
5711
            /* XXX: do it dynamically with CR4.DE bit */
5712
            if (reg == 4 || reg == 5 || reg >= 8)
5713
                goto illegal_op;
5714
            if (b & 2) {
5715
                gen_op_mov_TN_reg[ot][0][rm]();
5716
                gen_op_movl_drN_T0(reg);
5717
                gen_jmp_im(s->pc - s->cs_base);
5718
                gen_eob(s);
5719
            } else {
5720
                gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
5721
                gen_op_mov_reg_T0[ot][rm]();
5722
            }
5723
        }
5724
        break;
5725
    case 0x106: /* clts */
5726
        if (s->cpl != 0) {
5727
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5728
        } else {
5729
            gen_op_clts();
5730
            /* abort block because static cpu state changed */
5731
            gen_jmp_im(s->pc - s->cs_base);
5732
            gen_eob(s);
5733
        }
5734
        break;
5735
    /* MMX/SSE/SSE2/PNI support */
5736
    case 0x1c3: /* MOVNTI reg, mem */
5737
        if (!(s->cpuid_features & CPUID_SSE2))
5738
            goto illegal_op;
5739
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
5740
        modrm = ldub_code(s->pc++);
5741
        mod = (modrm >> 6) & 3;
5742
        if (mod == 3)
5743
            goto illegal_op;
5744
        reg = ((modrm >> 3) & 7) | rex_r;
5745
        /* generate a generic store */
5746
        gen_ldst_modrm(s, modrm, ot, reg, 1);
5747
        break;
5748
    case 0x1ae:
5749
        modrm = ldub_code(s->pc++);
5750
        mod = (modrm >> 6) & 3;
5751
        op = (modrm >> 3) & 7;
5752
        switch(op) {
5753
        case 0: /* fxsave */
5754
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR))
5755
                goto illegal_op;
5756
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5757
            gen_op_fxsave_A0((s->dflag == 2));
5758
            break;
5759
        case 1: /* fxrstor */
5760
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR))
5761
                goto illegal_op;
5762
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5763
            gen_op_fxrstor_A0((s->dflag == 2));
5764
            break;
5765
        case 2: /* ldmxcsr */
5766
        case 3: /* stmxcsr */
5767
            if (s->flags & HF_TS_MASK) {
5768
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5769
                break;
5770
            }
5771
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
5772
                mod == 3)
5773
                goto illegal_op;
5774
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5775
            if (op == 2) {
5776
                gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
5777
                gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
5778
            } else {
5779
                gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
5780
                gen_op_st_T0_A0[OT_LONG + s->mem_index]();
5781
            }
5782
            break;
5783
        case 5: /* lfence */
5784
        case 6: /* mfence */
5785
        case 7: /* sfence */
5786
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
5787
                goto illegal_op;
5788
            break;
5789
        default:
5790
            goto illegal_op;
5791
        }
5792
        break;
5793
    case 0x110 ... 0x117:
5794
    case 0x128 ... 0x12f:
5795
    case 0x150 ... 0x177:
5796
    case 0x17c ... 0x17f:
5797
    case 0x1c2:
5798
    case 0x1c4 ... 0x1c6:
5799
    case 0x1d0 ... 0x1fe:
5800
        gen_sse(s, b, pc_start, rex_r);
5801
        break;
5802
    default:
5803
        goto illegal_op;
5804
    }
5805
    /* lock generation */
5806
    if (s->prefix & PREFIX_LOCK)
5807
        gen_op_unlock();
5808
    return s->pc;
5809
 illegal_op:
5810
    if (s->prefix & PREFIX_LOCK)
5811
        gen_op_unlock();
5812
    /* XXX: ensure that no lock was generated */
5813
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
5814
    return s->pc;
5815
}
#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
/* flags read by an operation */
5821
static uint16_t opc_read_flags[NB_OPS] = { 
5822
    [INDEX_op_aas] = CC_A,
5823
    [INDEX_op_aaa] = CC_A,
5824
    [INDEX_op_das] = CC_A | CC_C,
5825
    [INDEX_op_daa] = CC_A | CC_C,
5826

    
5827
    /* subtle: due to the incl/decl implementation, C is used */
5828
    [INDEX_op_update_inc_cc] = CC_C, 
5829

    
5830
    [INDEX_op_into] = CC_O,
5831

    
5832
    [INDEX_op_jb_subb] = CC_C,
5833
    [INDEX_op_jb_subw] = CC_C,
5834
    [INDEX_op_jb_subl] = CC_C,
5835

    
5836
    [INDEX_op_jz_subb] = CC_Z,
5837
    [INDEX_op_jz_subw] = CC_Z,
5838
    [INDEX_op_jz_subl] = CC_Z,
5839

    
5840
    [INDEX_op_jbe_subb] = CC_Z | CC_C,
5841
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
5842
    [INDEX_op_jbe_subl] = CC_Z | CC_C,
5843

    
5844
    [INDEX_op_js_subb] = CC_S,
5845
    [INDEX_op_js_subw] = CC_S,
5846
    [INDEX_op_js_subl] = CC_S,
5847

    
5848
    [INDEX_op_jl_subb] = CC_O | CC_S,
5849
    [INDEX_op_jl_subw] = CC_O | CC_S,
5850
    [INDEX_op_jl_subl] = CC_O | CC_S,
5851

    
5852
    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
5853
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
5854
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,
5855

    
5856
    [INDEX_op_loopnzw] = CC_Z,
5857
    [INDEX_op_loopnzl] = CC_Z,
5858
    [INDEX_op_loopzw] = CC_Z,
5859
    [INDEX_op_loopzl] = CC_Z,
5860

    
5861
    [INDEX_op_seto_T0_cc] = CC_O,
5862
    [INDEX_op_setb_T0_cc] = CC_C,
5863
    [INDEX_op_setz_T0_cc] = CC_Z,
5864
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
5865
    [INDEX_op_sets_T0_cc] = CC_S,
5866
    [INDEX_op_setp_T0_cc] = CC_P,
5867
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
5868
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,
5869

    
5870
    [INDEX_op_setb_T0_subb] = CC_C,
5871
    [INDEX_op_setb_T0_subw] = CC_C,
5872
    [INDEX_op_setb_T0_subl] = CC_C,
5873

    
5874
    [INDEX_op_setz_T0_subb] = CC_Z,
5875
    [INDEX_op_setz_T0_subw] = CC_Z,
5876
    [INDEX_op_setz_T0_subl] = CC_Z,
5877

    
5878
    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
5879
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
5880
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,
5881

    
5882
    [INDEX_op_sets_T0_subb] = CC_S,
5883
    [INDEX_op_sets_T0_subw] = CC_S,
5884
    [INDEX_op_sets_T0_subl] = CC_S,
5885

    
5886
    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
5887
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
5888
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,
5889

    
5890
    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
5891
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
5892
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,
5893

    
5894
    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
5895
    [INDEX_op_cmc] = CC_C,
5896
    [INDEX_op_salc] = CC_C,
5897

    
5898
    /* needed for correct flag optimisation before string ops */
5899
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
5900
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
5901
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
5902
    [INDEX_op_jz_ecxl] = CC_OSZAPC,
5903

    
5904
#ifdef TARGET_X86_64
5905
    [INDEX_op_jb_subq] = CC_C,
5906
    [INDEX_op_jz_subq] = CC_Z,
5907
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
5908
    [INDEX_op_js_subq] = CC_S,
5909
    [INDEX_op_jl_subq] = CC_O | CC_S,
5910
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,
5911

    
5912
    [INDEX_op_loopnzq] = CC_Z,
5913
    [INDEX_op_loopzq] = CC_Z,
5914

    
5915
    [INDEX_op_setb_T0_subq] = CC_C,
5916
    [INDEX_op_setz_T0_subq] = CC_Z,
5917
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
5918
    [INDEX_op_sets_T0_subq] = CC_S,
5919
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
5920
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,
5921

    
5922
    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
5923
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
5924
#endif
5925

    
5926
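/* the DEF_READF macro instantiates the same entries for the plain, _raw,
   _kernel and _user flavours of the memory access ops */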
#define DEF_READF(SUFFIX)\
5927
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
5928
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
5929
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
5930
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
5931
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
5932
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
5933
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
5934
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
5935
\
5936
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
5937
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
5938
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
5939
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
5940
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
5941
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
5942
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
5943
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)
5944

    
5945
    DEF_READF( )
5946
    DEF_READF(_raw)
5947
#ifndef CONFIG_USER_ONLY
5948
    DEF_READF(_kernel)
5949
    DEF_READF(_user)
5950
#endif
5951
};
5952

    
5953
/* flags written by an operation */
5954
static uint16_t opc_write_flags[NB_OPS] = { 
5955
    [INDEX_op_update2_cc] = CC_OSZAPC,
5956
    [INDEX_op_update1_cc] = CC_OSZAPC,
5957
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
5958
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
5959
    /* subtle: due to the incl/decl implementation, C is used */
5960
    [INDEX_op_update_inc_cc] = CC_OSZAPC, 
5961
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,
5962

    
5963
    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
5964
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
5965
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
5966
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
5967
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
5968
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
5969
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
5970
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
5971
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
5972
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
5973
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)
5974

    
5975
    /* sse */
5976
    [INDEX_op_ucomiss] = CC_OSZAPC,
5977
    [INDEX_op_ucomisd] = CC_OSZAPC,
5978
    [INDEX_op_comiss] = CC_OSZAPC,
5979
    [INDEX_op_comisd] = CC_OSZAPC,
5980

    
5981
    /* bcd */
5982
    [INDEX_op_aam] = CC_OSZAPC,
5983
    [INDEX_op_aad] = CC_OSZAPC,
5984
    [INDEX_op_aas] = CC_OSZAPC,
5985
    [INDEX_op_aaa] = CC_OSZAPC,
5986
    [INDEX_op_das] = CC_OSZAPC,
5987
    [INDEX_op_daa] = CC_OSZAPC,
5988

    
5989
    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
5990
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
5991
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
5992
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
5993
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
5994
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
5995
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
5996
    [INDEX_op_clc] = CC_C,
5997
    [INDEX_op_stc] = CC_C,
5998
    [INDEX_op_cmc] = CC_C,
5999

    
6000
    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
6001
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
6002
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
6003
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
6004
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
6005
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
6006
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
6007
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
6008
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
6009
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
6010
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
6011
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)
6012

    
6013
    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
6014
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
6015
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
6016
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
6017
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
6018
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)
6019

    
6020
    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
6021
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
6022
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
6023
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)
6024

    
6025
    [INDEX_op_cmpxchg8b] = CC_Z,
6026
    [INDEX_op_lar] = CC_Z,
6027
    [INDEX_op_lsl] = CC_Z,
6028
    [INDEX_op_verr] = CC_Z,
6029
    [INDEX_op_verw] = CC_Z,
6030
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
6031
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,
6032

    
6033
#define DEF_WRITEF(SUFFIX)\
6034
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6035
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6036
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6037
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6038
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6039
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6040
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6041
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6042
\
6043
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6044
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6045
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6046
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6047
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6048
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6049
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6050
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6051
\
6052
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6053
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6054
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6055
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6056
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6057
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6058
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6059
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6060
\
6061
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6062
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6063
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6064
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6065
\
6066
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6067
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6068
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6069
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6070
\
6071
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6072
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6073
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6074
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6075
\
6076
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6077
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6078
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6079
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6080
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6081
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6082
\
6083
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6084
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6085
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6086
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6087
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6088
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6089
\
6090
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6091
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6092
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6093
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)
6094

    
6095

    
6096
    DEF_WRITEF( )
6097
    DEF_WRITEF(_raw)
6098
#ifndef CONFIG_USER_ONLY
6099
    DEF_WRITEF(_kernel)
6100
    DEF_WRITEF(_user)
6101
#endif
6102
};
6103

    
6104
/* simpler form of an operation if no flags need to be generated */
6105
static uint16_t opc_simpler[NB_OPS] = { 
6106
    [INDEX_op_update2_cc] = INDEX_op_nop,
6107
    [INDEX_op_update1_cc] = INDEX_op_nop,
6108
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
6109
#if 0
6110
    /* broken: CC_OP logic must be rewritten */
6111
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
6112
#endif
6113

    
6114
    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
6115
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
6116
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
6117
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)
6118

    
6119
    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
6120
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
6121
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
6122
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)
6123

    
6124
    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
6125
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
6126
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
6127
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)
6128

    
6129
#define DEF_SIMPLER(SUFFIX)\
6130
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
6131
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
6132
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
6133
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
6134
\
6135
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
6136
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
6137
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
6138
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)
6139

    
6140
    DEF_SIMPLER( )
6141
    DEF_SIMPLER(_raw)
6142
#ifndef CONFIG_USER_ONLY
6143
    DEF_SIMPLER(_kernel)
6144
    DEF_SIMPLER(_user)
6145
#endif
6146
};
6147

    
6148
void optimize_flags_init(void)
6149
{
6150
    int i;
6151
    /* put default values in arrays */
6152
    for(i = 0; i < NB_OPS; i++) {
6153
        if (opc_simpler[i] == 0)
6154
            opc_simpler[i] = i;
6155
    }
6156
}
/* CPU flags computation optimization: we move backward through the
   generated code to see which flags are needed. An operation is replaced
   by a simpler one when the flags it writes are not needed. */
6161
static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6162
{
6163
    uint16_t *opc_ptr;
6164
    int live_flags, write_flags, op;
6165

    
6166
    opc_ptr = opc_buf + opc_buf_len;
6167
    /* live_flags contains the flags needed by the next instructions
       in the code. At the end of the block, we consider that all the
       flags are live. */
6170
    live_flags = CC_OSZAPC;
6171
    while (opc_ptr > opc_buf) {
6172
        op = *--opc_ptr;
6173
        /* if none of the flags written by the instruction is used,
6174
           then we can try to find a simpler instruction */
6175
        write_flags = opc_write_flags[op];
6176
        if ((live_flags & write_flags) == 0) {
6177
            *opc_ptr = opc_simpler[op];
6178
        }
6179
        /* compute the live flags before the instruction */
6180
        live_flags &= ~write_flags;
6181
        live_flags |= opc_read_flags[op];
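        /* i.e. live_in = (live_out & ~written) | read, the usual backward
           liveness dataflow step */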
6182
    }
6183
}
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
6186
   basic block 'tb'. If search_pc is TRUE, also generate PC
6187
   information for each intermediate instruction. */
6188
static inline int gen_intermediate_code_internal(CPUState *env,
6189
                                                 TranslationBlock *tb, 
6190
                                                 int search_pc)
6191
{
6192
    DisasContext dc1, *dc = &dc1;
6193
    target_ulong pc_ptr;
6194
    uint16_t *gen_opc_end;
6195
    int flags, j, lj, cflags;
6196
    target_ulong pc_start;
6197
    target_ulong cs_base;
6198
    
6199
    /* generate intermediate code */
6200
    pc_start = tb->pc;
6201
    cs_base = tb->cs_base;
6202
    flags = tb->flags;
6203
    cflags = tb->cflags;
6204

    
6205
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
6206
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
6207
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
6208
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
6209
    dc->f_st = 0;
6210
    dc->vm86 = (flags >> VM_SHIFT) & 1;
6211
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
6212
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
6213
    dc->tf = (flags >> TF_SHIFT) & 1;
6214
    dc->singlestep_enabled = env->singlestep_enabled;
6215
    dc->cc_op = CC_OP_DYNAMIC;
6216
    dc->cs_base = cs_base;
6217
    dc->tb = tb;
6218
    dc->popl_esp_hack = 0;
6219
    /* select memory access functions */
6220
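    /* mem_index offsets into the memory access op tables: 0 selects the
       _raw accessors, 1 * 4 the _kernel ones and 2 * 4 the _user ones */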
    dc->mem_index = 0;
6221
    if (flags & HF_SOFTMMU_MASK) {
6222
        if (dc->cpl == 3)
6223
            dc->mem_index = 2 * 4;
6224
        else
6225
            dc->mem_index = 1 * 4;
6226
    }
6227
    dc->cpuid_features = env->cpuid_features;
6228
#ifdef TARGET_X86_64
6229
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
6230
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
6231
#endif
6232
    dc->flags = flags;
6233
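    /* direct block chaining for jumps is only enabled when we are neither
       single stepping nor waiting out an inhibited irq window */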
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
6234
                    (flags & HF_INHIBIT_IRQ_MASK)
6235
#ifndef CONFIG_SOFTMMU
6236
                    || (flags & HF_SOFTMMU_MASK)
6237
#endif
6238
                    );
6239
#if 0
6240
    /* check addseg logic */
6241
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
6242
        printf("ERROR addseg\n");
6243
#endif
6244

    
6245
    gen_opc_ptr = gen_opc_buf;
6246
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
6247
    gen_opparam_ptr = gen_opparam_buf;
6248
    nb_gen_labels = 0;
6249

    
6250
    dc->is_jmp = DISAS_NEXT;
6251
    pc_ptr = pc_start;
6252
    lj = -1;
6253

    
6254
    for(;;) {
6255
        if (env->nb_breakpoints > 0) {
6256
            for(j = 0; j < env->nb_breakpoints; j++) {
6257
                if (env->breakpoints[j] == pc_ptr) {
6258
                    gen_debug(dc, pc_ptr - dc->cs_base);
6259
                    break;
6260
                }
6261
            }
6262
        }
6263
        if (search_pc) {
6264
            j = gen_opc_ptr - gen_opc_buf;
6265
            if (lj < j) {
6266
                lj++;
6267
                while (lj < j)
6268
                    gen_opc_instr_start[lj++] = 0;
6269
            }
6270
            gen_opc_pc[lj] = pc_ptr;
6271
            gen_opc_cc_op[lj] = dc->cc_op;
6272
            gen_opc_instr_start[lj] = 1;
6273
        }
6274
        pc_ptr = disas_insn(dc, pc_ptr);
6275
        /* stop translation if indicated */
6276
        if (dc->is_jmp)
6277
            break;
6278
        /* in single step mode, we generate only one instruction and
           generate an exception */
        /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
6283
        if (dc->tf || dc->singlestep_enabled || 
6284
            (flags & HF_INHIBIT_IRQ_MASK) ||
6285
            (cflags & CF_SINGLE_INSN)) {
6286
            gen_jmp_im(pc_ptr - dc->cs_base);
6287
            gen_eob(dc);
6288
            break;
6289
        }
6290
        /* also stop generation if the translation becomes too long */
6291
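        /* the TARGET_PAGE_SIZE - 32 bound keeps a translation block within
           roughly one guest page, with a margin for one last (possibly
           maximal length) instruction */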
        if (gen_opc_ptr >= gen_opc_end ||
6292
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
6293
            gen_jmp_im(pc_ptr - dc->cs_base);
6294
            gen_eob(dc);
6295
            break;
6296
        }
6297
    }
6298
    *gen_opc_ptr = INDEX_op_end;
6299
    /* make sure the last values are filled in */
6300
    if (search_pc) {
6301
        j = gen_opc_ptr - gen_opc_buf;
6302
        lj++;
6303
        while (lj <= j)
6304
            gen_opc_instr_start[lj++] = 0;
6305
    }
6306
        
6307
#ifdef DEBUG_DISAS
6308
    if (loglevel & CPU_LOG_TB_CPU) {
6309
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
6310
    }
6311
    if (loglevel & CPU_LOG_TB_IN_ASM) {
6312
        int disas_flags;
6313
        fprintf(logfile, "----------------\n");
6314
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
6315
#ifdef TARGET_X86_64
6316
        if (dc->code64)
6317
            disas_flags = 2;
6318
        else
6319
#endif
6320
            disas_flags = !dc->code32;
6321
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
6322
        fprintf(logfile, "\n");
6323
        if (loglevel & CPU_LOG_TB_OP) {
6324
            fprintf(logfile, "OP:\n");
6325
            dump_ops(gen_opc_buf, gen_opparam_buf);
6326
            fprintf(logfile, "\n");
6327
        }
6328
    }
6329
#endif
6330

    
6331
    /* optimize flag computations */
6332
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);
6333

    
6334
#ifdef DEBUG_DISAS
6335
    if (loglevel & CPU_LOG_TB_OP_OPT) {
6336
        fprintf(logfile, "AFTER FLAGS OPT:\n");
6337
        dump_ops(gen_opc_buf, gen_opparam_buf);
6338
        fprintf(logfile, "\n");
6339
    }
6340
#endif
6341
    if (!search_pc)
6342
        tb->size = pc_ptr - pc_start;
6343
    return 0;
6344
}
6345

    
6346
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
6347
{
6348
    return gen_intermediate_code_internal(env, tb, 0);
6349
}
6350

    
6351
int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
6352
{
6353
    return gen_intermediate_code_internal(env, tb, 1);
6354
}
6355