/*
 *  i386 translation
 *
 *  Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>
#include <signal.h>
#include <assert.h>

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"

/* XXX: move that elsewhere */
static uint16_t *gen_opc_ptr;
static uint32_t *gen_opparam_ptr;

#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10

#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

#ifdef TARGET_X86_64
static int x86_64_hregs;
#endif

#ifdef USE_DIRECT_JUMP
#define TBPARAM(x)
#else
#define TBPARAM(x) (long)(x)
#endif

typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;
    int aflag, dflag;
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b;
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non-zero if any of DS/ES/SS has a non-zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    int flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;
} DisasContext;
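
/* Illustrative note, not part of the original source: one DisasContext
   describes the translation block currently being generated.  aflag and
   dflag hold the effective address/operand size (0 = 16 bit, 1 = 32 bit,
   2 = 64 bit), and pc is always the linear address cs_base + eip, so a
   caller would conceptually seed it as

       dc->cs_base = env->segs[R_CS].base;
       dc->pc      = dc->cs_base + env->eip;

   The real initialization is done by the gen_intermediate_code* entry
   points later in this file. */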

static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);

/* i386 arith/logic operations */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

enum {
#define DEF(s, n, copy_size) INDEX_op_ ## s,
#include "opc.h"
#undef DEF
    NB_OPS,
};

#include "gen-op.h"

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

enum {
    /* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};

166
#ifdef TARGET_X86_64
167

    
168
#define NB_OP_SIZES 4
169

    
170
#define DEF_REGS(prefix, suffix) \
171
  prefix ## EAX ## suffix,\
172
  prefix ## ECX ## suffix,\
173
  prefix ## EDX ## suffix,\
174
  prefix ## EBX ## suffix,\
175
  prefix ## ESP ## suffix,\
176
  prefix ## EBP ## suffix,\
177
  prefix ## ESI ## suffix,\
178
  prefix ## EDI ## suffix,\
179
  prefix ## R8 ## suffix,\
180
  prefix ## R9 ## suffix,\
181
  prefix ## R10 ## suffix,\
182
  prefix ## R11 ## suffix,\
183
  prefix ## R12 ## suffix,\
184
  prefix ## R13 ## suffix,\
185
  prefix ## R14 ## suffix,\
186
  prefix ## R15 ## suffix,
187

    
188
#define DEF_BREGS(prefixb, prefixh, suffix)             \
189
                                                        \
190
static void prefixb ## ESP ## suffix ## _wrapper(void)  \
191
{                                                       \
192
    if (x86_64_hregs)                                 \
193
        prefixb ## ESP ## suffix ();                    \
194
    else                                                \
195
        prefixh ## EAX ## suffix ();                    \
196
}                                                       \
197
                                                        \
198
static void prefixb ## EBP ## suffix ## _wrapper(void)  \
199
{                                                       \
200
    if (x86_64_hregs)                                 \
201
        prefixb ## EBP ## suffix ();                    \
202
    else                                                \
203
        prefixh ## ECX ## suffix ();                    \
204
}                                                       \
205
                                                        \
206
static void prefixb ## ESI ## suffix ## _wrapper(void)  \
207
{                                                       \
208
    if (x86_64_hregs)                                 \
209
        prefixb ## ESI ## suffix ();                    \
210
    else                                                \
211
        prefixh ## EDX ## suffix ();                    \
212
}                                                       \
213
                                                        \
214
static void prefixb ## EDI ## suffix ## _wrapper(void)  \
215
{                                                       \
216
    if (x86_64_hregs)                                 \
217
        prefixb ## EDI ## suffix ();                    \
218
    else                                                \
219
        prefixh ## EBX ## suffix ();                    \
220
}
221

    
222
DEF_BREGS(gen_op_movb_, gen_op_movh_, _T0)
223
DEF_BREGS(gen_op_movb_, gen_op_movh_, _T1)
224
DEF_BREGS(gen_op_movl_T0_, gen_op_movh_T0_, )
225
DEF_BREGS(gen_op_movl_T1_, gen_op_movh_T1_, )
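
/* Why the _wrapper variants above exist (illustrative note, not from the
   original source): in legacy encodings the byte-register numbers 4..7
   select AH/CH/DH/BH, but when any REX prefix is present they select
   SPL/BPL/SIL/DIL instead.  x86_64_hregs records which interpretation is
   in effect for the current instruction, so e.g.
   gen_op_movb_ESP_T0_wrapper() stores to SPL when a REX prefix was seen
   and to AH (the movh_EAX op) otherwise. */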
226

    
227
#else /* !TARGET_X86_64 */
228

    
229
#define NB_OP_SIZES 3
230

    
231
#define DEF_REGS(prefix, suffix) \
232
  prefix ## EAX ## suffix,\
233
  prefix ## ECX ## suffix,\
234
  prefix ## EDX ## suffix,\
235
  prefix ## EBX ## suffix,\
236
  prefix ## ESP ## suffix,\
237
  prefix ## EBP ## suffix,\
238
  prefix ## ESI ## suffix,\
239
  prefix ## EDI ## suffix,
240

    
241
#endif /* !TARGET_X86_64 */
242

    
243
static GenOpFunc *gen_op_mov_reg_T0[NB_OP_SIZES][CPU_NB_REGS] = {
244
    [OT_BYTE] = {
245
        gen_op_movb_EAX_T0,
246
        gen_op_movb_ECX_T0,
247
        gen_op_movb_EDX_T0,
248
        gen_op_movb_EBX_T0,
249
#ifdef TARGET_X86_64
250
        gen_op_movb_ESP_T0_wrapper,
251
        gen_op_movb_EBP_T0_wrapper,
252
        gen_op_movb_ESI_T0_wrapper,
253
        gen_op_movb_EDI_T0_wrapper,
254
        gen_op_movb_R8_T0,
255
        gen_op_movb_R9_T0,
256
        gen_op_movb_R10_T0,
257
        gen_op_movb_R11_T0,
258
        gen_op_movb_R12_T0,
259
        gen_op_movb_R13_T0,
260
        gen_op_movb_R14_T0,
261
        gen_op_movb_R15_T0,
262
#else
263
        gen_op_movh_EAX_T0,
264
        gen_op_movh_ECX_T0,
265
        gen_op_movh_EDX_T0,
266
        gen_op_movh_EBX_T0,
267
#endif
268
    },
269
    [OT_WORD] = {
270
        DEF_REGS(gen_op_movw_, _T0)
271
    },
272
    [OT_LONG] = {
273
        DEF_REGS(gen_op_movl_, _T0)
274
    },
275
#ifdef TARGET_X86_64
276
    [OT_QUAD] = {
277
        DEF_REGS(gen_op_movq_, _T0)
278
    },
279
#endif
280
};
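
/* Usage sketch (illustrative): these tables map [operand size][register
   number] to a generated micro-op, so later code simply dispatches with,
   for example,

       gen_op_mov_reg_T0[OT_LONG][R_EAX]();    (EAX = T0, 32 bit)

   The same pattern repeats for all of the operand tables below. */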
281

    
282
static GenOpFunc *gen_op_mov_reg_T1[NB_OP_SIZES][CPU_NB_REGS] = {
283
    [OT_BYTE] = {
284
        gen_op_movb_EAX_T1,
285
        gen_op_movb_ECX_T1,
286
        gen_op_movb_EDX_T1,
287
        gen_op_movb_EBX_T1,
288
#ifdef TARGET_X86_64
289
        gen_op_movb_ESP_T1_wrapper,
290
        gen_op_movb_EBP_T1_wrapper,
291
        gen_op_movb_ESI_T1_wrapper,
292
        gen_op_movb_EDI_T1_wrapper,
293
        gen_op_movb_R8_T1,
294
        gen_op_movb_R9_T1,
295
        gen_op_movb_R10_T1,
296
        gen_op_movb_R11_T1,
297
        gen_op_movb_R12_T1,
298
        gen_op_movb_R13_T1,
299
        gen_op_movb_R14_T1,
300
        gen_op_movb_R15_T1,
301
#else
302
        gen_op_movh_EAX_T1,
303
        gen_op_movh_ECX_T1,
304
        gen_op_movh_EDX_T1,
305
        gen_op_movh_EBX_T1,
306
#endif
307
    },
308
    [OT_WORD] = {
309
        DEF_REGS(gen_op_movw_, _T1)
310
    },
311
    [OT_LONG] = {
312
        DEF_REGS(gen_op_movl_, _T1)
313
    },
314
#ifdef TARGET_X86_64
315
    [OT_QUAD] = {
316
        DEF_REGS(gen_op_movq_, _T1)
317
    },
318
#endif
319
};
320

    
321
static GenOpFunc *gen_op_mov_reg_A0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
322
    [0] = {
323
        DEF_REGS(gen_op_movw_, _A0)
324
    },
325
    [1] = {
326
        DEF_REGS(gen_op_movl_, _A0)
327
    },
328
#ifdef TARGET_X86_64
329
    [2] = {
330
        DEF_REGS(gen_op_movq_, _A0)
331
    },
332
#endif
333
};
334

    
335
static GenOpFunc *gen_op_mov_TN_reg[NB_OP_SIZES][2][CPU_NB_REGS] = 
336
{
337
    [OT_BYTE] = {
338
        {
339
            gen_op_movl_T0_EAX,
340
            gen_op_movl_T0_ECX,
341
            gen_op_movl_T0_EDX,
342
            gen_op_movl_T0_EBX,
343
#ifdef TARGET_X86_64
344
            gen_op_movl_T0_ESP_wrapper,
345
            gen_op_movl_T0_EBP_wrapper,
346
            gen_op_movl_T0_ESI_wrapper,
347
            gen_op_movl_T0_EDI_wrapper,
348
            gen_op_movl_T0_R8,
349
            gen_op_movl_T0_R9,
350
            gen_op_movl_T0_R10,
351
            gen_op_movl_T0_R11,
352
            gen_op_movl_T0_R12,
353
            gen_op_movl_T0_R13,
354
            gen_op_movl_T0_R14,
355
            gen_op_movl_T0_R15,
356
#else
357
            gen_op_movh_T0_EAX,
358
            gen_op_movh_T0_ECX,
359
            gen_op_movh_T0_EDX,
360
            gen_op_movh_T0_EBX,
361
#endif
362
        },
363
        {
364
            gen_op_movl_T1_EAX,
365
            gen_op_movl_T1_ECX,
366
            gen_op_movl_T1_EDX,
367
            gen_op_movl_T1_EBX,
368
#ifdef TARGET_X86_64
369
            gen_op_movl_T1_ESP_wrapper,
370
            gen_op_movl_T1_EBP_wrapper,
371
            gen_op_movl_T1_ESI_wrapper,
372
            gen_op_movl_T1_EDI_wrapper,
373
            gen_op_movl_T1_R8,
374
            gen_op_movl_T1_R9,
375
            gen_op_movl_T1_R10,
376
            gen_op_movl_T1_R11,
377
            gen_op_movl_T1_R12,
378
            gen_op_movl_T1_R13,
379
            gen_op_movl_T1_R14,
380
            gen_op_movl_T1_R15,
381
#else
382
            gen_op_movh_T1_EAX,
383
            gen_op_movh_T1_ECX,
384
            gen_op_movh_T1_EDX,
385
            gen_op_movh_T1_EBX,
386
#endif
387
        },
388
    },
389
    [OT_WORD] = {
390
        {
391
            DEF_REGS(gen_op_movl_T0_, )
392
        },
393
        {
394
            DEF_REGS(gen_op_movl_T1_, )
395
        },
396
    },
397
    [OT_LONG] = {
398
        {
399
            DEF_REGS(gen_op_movl_T0_, )
400
        },
401
        {
402
            DEF_REGS(gen_op_movl_T1_, )
403
        },
404
    },
405
#ifdef TARGET_X86_64
406
    [OT_QUAD] = {
407
        {
408
            DEF_REGS(gen_op_movl_T0_, )
409
        },
410
        {
411
            DEF_REGS(gen_op_movl_T1_, )
412
        },
413
    },
414
#endif
415
};
416

    
417
static GenOpFunc *gen_op_movl_A0_reg[CPU_NB_REGS] = {
418
    DEF_REGS(gen_op_movl_A0_, )
419
};
420

    
421
static GenOpFunc *gen_op_addl_A0_reg_sN[4][CPU_NB_REGS] = {
422
    [0] = {
423
        DEF_REGS(gen_op_addl_A0_, )
424
    },
425
    [1] = {
426
        DEF_REGS(gen_op_addl_A0_, _s1)
427
    },
428
    [2] = {
429
        DEF_REGS(gen_op_addl_A0_, _s2)
430
    },
431
    [3] = {
432
        DEF_REGS(gen_op_addl_A0_, _s3)
433
    },
434
};
435

    
436
#ifdef TARGET_X86_64
437
static GenOpFunc *gen_op_movq_A0_reg[CPU_NB_REGS] = {
438
    DEF_REGS(gen_op_movq_A0_, )
439
};
440

    
441
static GenOpFunc *gen_op_addq_A0_reg_sN[4][CPU_NB_REGS] = {
442
    [0] = {
443
        DEF_REGS(gen_op_addq_A0_, )
444
    },
445
    [1] = {
446
        DEF_REGS(gen_op_addq_A0_, _s1)
447
    },
448
    [2] = {
449
        DEF_REGS(gen_op_addq_A0_, _s2)
450
    },
451
    [3] = {
452
        DEF_REGS(gen_op_addq_A0_, _s3)
453
    },
454
};
455
#endif
456

    
457
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
458
    [0] = {
459
        DEF_REGS(gen_op_cmovw_, _T1_T0)
460
    },
461
    [1] = {
462
        DEF_REGS(gen_op_cmovl_, _T1_T0)
463
    },
464
#ifdef TARGET_X86_64
465
    [2] = {
466
        DEF_REGS(gen_op_cmovq_, _T1_T0)
467
    },
468
#endif
469
};
470

    
471
static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
472
    NULL,
473
    gen_op_orl_T0_T1,
474
    NULL,
475
    NULL,
476
    gen_op_andl_T0_T1,
477
    NULL,
478
    gen_op_xorl_T0_T1,
479
    NULL,
480
};
481

    
482
#define DEF_ARITHC(SUFFIX)\
483
    {\
484
        gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
485
        gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
486
    },\
487
    {\
488
        gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
489
        gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
490
    },\
491
    {\
492
        gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
493
        gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
494
    },\
495
    {\
496
        X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
497
        X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
498
    },
499

    
500
static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
501
    DEF_ARITHC( )
502
};
503

    
504
static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
505
    DEF_ARITHC(_raw)
506
#ifndef CONFIG_USER_ONLY
507
    DEF_ARITHC(_kernel)
508
    DEF_ARITHC(_user)
509
#endif
510
};
511

    
512
static const int cc_op_arithb[8] = {
513
    CC_OP_ADDB,
514
    CC_OP_LOGICB,
515
    CC_OP_ADDB,
516
    CC_OP_SUBB,
517
    CC_OP_LOGICB,
518
    CC_OP_SUBB,
519
    CC_OP_LOGICB,
520
    CC_OP_SUBB,
521
};
522

    
523
#define DEF_CMPXCHG(SUFFIX)\
524
    gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
525
    gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
526
    gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
527
    X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
528

    
529
static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
530
    DEF_CMPXCHG( )
531
};
532

    
533
static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
534
    DEF_CMPXCHG(_raw)
535
#ifndef CONFIG_USER_ONLY
536
    DEF_CMPXCHG(_kernel)
537
    DEF_CMPXCHG(_user)
538
#endif
539
};
540

    
541
#define DEF_SHIFT(SUFFIX)\
542
    {\
543
        gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
544
        gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
545
        gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
546
        gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
547
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
548
        gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
549
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
550
        gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
551
    },\
552
    {\
553
        gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
554
        gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
555
        gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
556
        gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
557
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
558
        gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
559
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
560
        gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
561
    },\
562
    {\
563
        gen_op_roll ## SUFFIX ## _T0_T1_cc,\
564
        gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
565
        gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
566
        gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
567
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
568
        gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
569
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
570
        gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
571
    },\
572
    {\
573
        X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
574
        X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
575
        X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
576
        X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
577
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
578
        X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
579
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
580
        X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
581
    },
582

    
583
static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
584
    DEF_SHIFT( )
585
};
586

    
587
static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
588
    DEF_SHIFT(_raw)
589
#ifndef CONFIG_USER_ONLY
590
    DEF_SHIFT(_kernel)
591
    DEF_SHIFT(_user)
592
#endif
593
};
594

    
595
#define DEF_SHIFTD(SUFFIX, op)\
596
    {\
597
        NULL,\
598
        NULL,\
599
    },\
600
    {\
601
        gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
602
        gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
603
    },\
604
    {\
605
        gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
606
        gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
607
    },\
608
    {\
609
    },
610

    
611
static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
612
    DEF_SHIFTD(, im)
613
};
614

    
615
static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
616
    DEF_SHIFTD(, ECX)
617
};
618

    
619
static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
620
    DEF_SHIFTD(_raw, im)
621
#ifndef CONFIG_USER_ONLY
622
    DEF_SHIFTD(_kernel, im)
623
    DEF_SHIFTD(_user, im)
624
#endif
625
};
626

    
627
static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
628
    DEF_SHIFTD(_raw, ECX)
629
#ifndef CONFIG_USER_ONLY
630
    DEF_SHIFTD(_kernel, ECX)
631
    DEF_SHIFTD(_user, ECX)
632
#endif
633
};
634

    
635
static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
636
    [0] = {
637
        gen_op_btw_T0_T1_cc,
638
        gen_op_btsw_T0_T1_cc,
639
        gen_op_btrw_T0_T1_cc,
640
        gen_op_btcw_T0_T1_cc,
641
    },
642
    [1] = {
643
        gen_op_btl_T0_T1_cc,
644
        gen_op_btsl_T0_T1_cc,
645
        gen_op_btrl_T0_T1_cc,
646
        gen_op_btcl_T0_T1_cc,
647
    },
648
#ifdef TARGET_X86_64
649
    [2] = {
650
        gen_op_btq_T0_T1_cc,
651
        gen_op_btsq_T0_T1_cc,
652
        gen_op_btrq_T0_T1_cc,
653
        gen_op_btcq_T0_T1_cc,
654
    },
655
#endif
656
};
657

    
658
static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
659
    gen_op_add_bitw_A0_T1,
660
    gen_op_add_bitl_A0_T1,
661
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
662
};
663

    
664
static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
665
    [0] = {
666
        gen_op_bsfw_T0_cc,
667
        gen_op_bsrw_T0_cc,
668
    },
669
    [1] = {
670
        gen_op_bsfl_T0_cc,
671
        gen_op_bsrl_T0_cc,
672
    },
673
#ifdef TARGET_X86_64
674
    [2] = {
675
        gen_op_bsfq_T0_cc,
676
        gen_op_bsrq_T0_cc,
677
    },
678
#endif
679
};
680

    
681
static GenOpFunc *gen_op_lds_T0_A0[3 * 4] = {
682
    gen_op_ldsb_raw_T0_A0,
683
    gen_op_ldsw_raw_T0_A0,
684
    X86_64_ONLY(gen_op_ldsl_raw_T0_A0),
685
    NULL,
686
#ifndef CONFIG_USER_ONLY
687
    gen_op_ldsb_kernel_T0_A0,
688
    gen_op_ldsw_kernel_T0_A0,
689
    X86_64_ONLY(gen_op_ldsl_kernel_T0_A0),
690
    NULL,
691

    
692
    gen_op_ldsb_user_T0_A0,
693
    gen_op_ldsw_user_T0_A0,
694
    X86_64_ONLY(gen_op_ldsl_user_T0_A0),
695
    NULL,
696
#endif
697
};
698

    
699
static GenOpFunc *gen_op_ldu_T0_A0[3 * 4] = {
700
    gen_op_ldub_raw_T0_A0,
701
    gen_op_lduw_raw_T0_A0,
702
    NULL,
703
    NULL,
704

    
705
#ifndef CONFIG_USER_ONLY
706
    gen_op_ldub_kernel_T0_A0,
707
    gen_op_lduw_kernel_T0_A0,
708
    NULL,
709
    NULL,
710

    
711
    gen_op_ldub_user_T0_A0,
712
    gen_op_lduw_user_T0_A0,
713
    NULL,
714
    NULL,
715
#endif
716
};
717

    
718
/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
719
static GenOpFunc *gen_op_ld_T0_A0[3 * 4] = {
720
    gen_op_ldub_raw_T0_A0,
721
    gen_op_lduw_raw_T0_A0,
722
    gen_op_ldl_raw_T0_A0,
723
    X86_64_ONLY(gen_op_ldq_raw_T0_A0),
724

    
725
#ifndef CONFIG_USER_ONLY
726
    gen_op_ldub_kernel_T0_A0,
727
    gen_op_lduw_kernel_T0_A0,
728
    gen_op_ldl_kernel_T0_A0,
729
    X86_64_ONLY(gen_op_ldq_kernel_T0_A0),
730

    
731
    gen_op_ldub_user_T0_A0,
732
    gen_op_lduw_user_T0_A0,
733
    gen_op_ldl_user_T0_A0,
734
    X86_64_ONLY(gen_op_ldq_user_T0_A0),
735
#endif
736
};
737

    
738
static GenOpFunc *gen_op_ld_T1_A0[3 * 4] = {
739
    gen_op_ldub_raw_T1_A0,
740
    gen_op_lduw_raw_T1_A0,
741
    gen_op_ldl_raw_T1_A0,
742
    X86_64_ONLY(gen_op_ldq_raw_T1_A0),
743

    
744
#ifndef CONFIG_USER_ONLY
745
    gen_op_ldub_kernel_T1_A0,
746
    gen_op_lduw_kernel_T1_A0,
747
    gen_op_ldl_kernel_T1_A0,
748
    X86_64_ONLY(gen_op_ldq_kernel_T1_A0),
749

    
750
    gen_op_ldub_user_T1_A0,
751
    gen_op_lduw_user_T1_A0,
752
    gen_op_ldl_user_T1_A0,
753
    X86_64_ONLY(gen_op_ldq_user_T1_A0),
754
#endif
755
};
756

    
757
static GenOpFunc *gen_op_st_T0_A0[3 * 4] = {
758
    gen_op_stb_raw_T0_A0,
759
    gen_op_stw_raw_T0_A0,
760
    gen_op_stl_raw_T0_A0,
761
    X86_64_ONLY(gen_op_stq_raw_T0_A0),
762

    
763
#ifndef CONFIG_USER_ONLY
764
    gen_op_stb_kernel_T0_A0,
765
    gen_op_stw_kernel_T0_A0,
766
    gen_op_stl_kernel_T0_A0,
767
    X86_64_ONLY(gen_op_stq_kernel_T0_A0),
768

    
769
    gen_op_stb_user_T0_A0,
770
    gen_op_stw_user_T0_A0,
771
    gen_op_stl_user_T0_A0,
772
    X86_64_ONLY(gen_op_stq_user_T0_A0),
773
#endif
774
};
775

    
776
static GenOpFunc *gen_op_st_T1_A0[3 * 4] = {
777
    NULL,
778
    gen_op_stw_raw_T1_A0,
779
    gen_op_stl_raw_T1_A0,
780
    X86_64_ONLY(gen_op_stq_raw_T1_A0),
781

    
782
#ifndef CONFIG_USER_ONLY
783
    NULL,
784
    gen_op_stw_kernel_T1_A0,
785
    gen_op_stl_kernel_T1_A0,
786
    X86_64_ONLY(gen_op_stq_kernel_T1_A0),
787

    
788
    NULL,
789
    gen_op_stw_user_T1_A0,
790
    gen_op_stl_user_T1_A0,
791
    X86_64_ONLY(gen_op_stq_user_T1_A0),
792
#endif
793
};
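
/* Indexing sketch for the load/store tables above (illustrative note):
   each table has 3 * 4 entries, laid out as three groups of four operand
   sizes (raw, then kernel, then user access).  s->mem_index is expected
   to be 0, 4 or 8, so a single addition selects both the access path and
   the width, e.g.

       gen_op_ld_T0_A0[OT_LONG + s->mem_index]();

   loads a 32 bit value through the access path chosen at translation
   time. */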
794

    
795
static inline void gen_jmp_im(target_ulong pc)
796
{
797
#ifdef TARGET_X86_64
798
    if (pc == (uint32_t)pc) {
799
        gen_op_movl_eip_im(pc);
800
    } else if (pc == (int32_t)pc) {
801
        gen_op_movq_eip_im(pc);
802
    } else {
803
        gen_op_movq_eip_im64(pc >> 32, pc);
804
    }
805
#else
806
    gen_op_movl_eip_im(pc);
807
#endif
808
}
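
/* Illustrative examples for gen_jmp_im() (not part of the original
   source): the three cases just pick the cheapest way to load the new EIP.

       pc = 0x00001234          -> fits in 32 bits zero extended
       pc = 0xffffffff80001234  -> fits in 32 bits sign extended
       anything else            -> needs the full 64 bit immediate,
                                   passed as two 32 bit halves. */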
809

    
810
static inline void gen_string_movl_A0_ESI(DisasContext *s)
811
{
812
    int override;
813

    
814
    override = s->override;
815
#ifdef TARGET_X86_64
816
    if (s->aflag == 2) {
817
        if (override >= 0) {
818
            gen_op_movq_A0_seg(offsetof(CPUX86State,segs[override].base));
819
            gen_op_addq_A0_reg_sN[0][R_ESI]();
820
        } else {
821
            gen_op_movq_A0_reg[R_ESI]();
822
        }
823
    } else
824
#endif
825
    if (s->aflag) {
826
        /* 32 bit address */
827
        if (s->addseg && override < 0)
828
            override = R_DS;
829
        if (override >= 0) {
830
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
831
            gen_op_addl_A0_reg_sN[0][R_ESI]();
832
        } else {
833
            gen_op_movl_A0_reg[R_ESI]();
834
        }
835
    } else {
836
        /* 16 bit address, always override */
837
        if (override < 0)
838
            override = R_DS;
839
        gen_op_movl_A0_reg[R_ESI]();
840
        gen_op_andl_A0_ffff();
841
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
842
    }
843
}
844

    
845
static inline void gen_string_movl_A0_EDI(DisasContext *s)
846
{
847
#ifdef TARGET_X86_64
848
    if (s->aflag == 2) {
849
        gen_op_movq_A0_reg[R_EDI]();
850
    } else
851
#endif
852
    if (s->aflag) {
853
        if (s->addseg) {
854
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
855
            gen_op_addl_A0_reg_sN[0][R_EDI]();
856
        } else {
857
            gen_op_movl_A0_reg[R_EDI]();
858
        }
859
    } else {
860
        gen_op_movl_A0_reg[R_EDI]();
861
        gen_op_andl_A0_ffff();
862
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
863
    }
864
}
865

    
866
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
867
    gen_op_movl_T0_Dshiftb,
868
    gen_op_movl_T0_Dshiftw,
869
    gen_op_movl_T0_Dshiftl,
870
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
871
};
872

    
873
static GenOpFunc1 *gen_op_jnz_ecx[3] = {
874
    gen_op_jnz_ecxw,
875
    gen_op_jnz_ecxl,
876
    X86_64_ONLY(gen_op_jnz_ecxq),
877
};
878
    
879
static GenOpFunc1 *gen_op_jz_ecx[3] = {
880
    gen_op_jz_ecxw,
881
    gen_op_jz_ecxl,
882
    X86_64_ONLY(gen_op_jz_ecxq),
883
};
884

    
885
static GenOpFunc *gen_op_dec_ECX[3] = {
886
    gen_op_decw_ECX,
887
    gen_op_decl_ECX,
888
    X86_64_ONLY(gen_op_decq_ECX),
889
};
890

    
891
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
892
    {
893
        gen_op_jnz_subb,
894
        gen_op_jnz_subw,
895
        gen_op_jnz_subl,
896
        X86_64_ONLY(gen_op_jnz_subq),
897
    },
898
    {
899
        gen_op_jz_subb,
900
        gen_op_jz_subw,
901
        gen_op_jz_subl,
902
        X86_64_ONLY(gen_op_jz_subq),
903
    },
904
};
905

    
906
static GenOpFunc *gen_op_in_DX_T0[3] = {
907
    gen_op_inb_DX_T0,
908
    gen_op_inw_DX_T0,
909
    gen_op_inl_DX_T0,
910
};
911

    
912
static GenOpFunc *gen_op_out_DX_T0[3] = {
913
    gen_op_outb_DX_T0,
914
    gen_op_outw_DX_T0,
915
    gen_op_outl_DX_T0,
916
};
917

    
918
static GenOpFunc *gen_op_in[3] = {
919
    gen_op_inb_T0_T1,
920
    gen_op_inw_T0_T1,
921
    gen_op_inl_T0_T1,
922
};
923

    
924
static GenOpFunc *gen_op_out[3] = {
925
    gen_op_outb_T0_T1,
926
    gen_op_outw_T0_T1,
927
    gen_op_outl_T0_T1,
928
};
929

    
930
static GenOpFunc *gen_check_io_T0[3] = {
931
    gen_op_check_iob_T0,
932
    gen_op_check_iow_T0,
933
    gen_op_check_iol_T0,
934
};
935

    
936
static GenOpFunc *gen_check_io_DX[3] = {
937
    gen_op_check_iob_DX,
938
    gen_op_check_iow_DX,
939
    gen_op_check_iol_DX,
940
};
941

    
942
static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
943
{
944
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
945
        if (s->cc_op != CC_OP_DYNAMIC)
946
            gen_op_set_cc_op(s->cc_op);
947
        gen_jmp_im(cur_eip);
948
        if (use_dx)
949
            gen_check_io_DX[ot]();
950
        else
951
            gen_check_io_T0[ot]();
952
    }
953
}
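
/* Note on gen_check_io() (illustrative): the check is only emitted when
   the guest is not allowed unrestricted I/O, i.e. in protected mode with
   CPL > IOPL, or in vm86 mode.  The emitted op then validates the port at
   run time against the TSS I/O permission bitmap and raises #GP on
   failure; otherwise no code at all is generated for the check. */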
954

    
955
static inline void gen_movs(DisasContext *s, int ot)
956
{
957
    gen_string_movl_A0_ESI(s);
958
    gen_op_ld_T0_A0[ot + s->mem_index]();
959
    gen_string_movl_A0_EDI(s);
960
    gen_op_st_T0_A0[ot + s->mem_index]();
961
    gen_op_movl_T0_Dshift[ot]();
962
#ifdef TARGET_X86_64
963
    if (s->aflag == 2) {
964
        gen_op_addq_ESI_T0();
965
        gen_op_addq_EDI_T0();
966
    } else 
967
#endif
968
    if (s->aflag) {
969
        gen_op_addl_ESI_T0();
970
        gen_op_addl_EDI_T0();
971
    } else {
972
        gen_op_addw_ESI_T0();
973
        gen_op_addw_EDI_T0();
974
    }
975
}
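
/* Pattern shared by all the string ops below (illustrative note): the
   Dshift op loads T0 with +size or -size depending on EFLAGS.DF (e.g.
   +/-4 for a 32 bit element), and that value is then added to ESI and/or
   EDI with the width selected by the current address size. */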
976

    
977
static inline void gen_update_cc_op(DisasContext *s)
978
{
979
    if (s->cc_op != CC_OP_DYNAMIC) {
980
        gen_op_set_cc_op(s->cc_op);
981
        s->cc_op = CC_OP_DYNAMIC;
982
    }
983
}
984

    
985
/* XXX: does not work with gdbstub "ice" single step - not a
986
   serious problem */
987
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
988
{
989
    int l1, l2;
990

    
991
    l1 = gen_new_label();
992
    l2 = gen_new_label();
993
    gen_op_jnz_ecx[s->aflag](l1);
994
    gen_set_label(l2);
995
    gen_jmp_tb(s, next_eip, 1);
996
    gen_set_label(l1);
997
    return l2;
998
}
999

    
1000
static inline void gen_stos(DisasContext *s, int ot)
1001
{
1002
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
1003
    gen_string_movl_A0_EDI(s);
1004
    gen_op_st_T0_A0[ot + s->mem_index]();
1005
    gen_op_movl_T0_Dshift[ot]();
1006
#ifdef TARGET_X86_64
1007
    if (s->aflag == 2) {
1008
        gen_op_addq_EDI_T0();
1009
    } else 
1010
#endif
1011
    if (s->aflag) {
1012
        gen_op_addl_EDI_T0();
1013
    } else {
1014
        gen_op_addw_EDI_T0();
1015
    }
1016
}
1017

    
1018
static inline void gen_lods(DisasContext *s, int ot)
1019
{
1020
    gen_string_movl_A0_ESI(s);
1021
    gen_op_ld_T0_A0[ot + s->mem_index]();
1022
    gen_op_mov_reg_T0[ot][R_EAX]();
1023
    gen_op_movl_T0_Dshift[ot]();
1024
#ifdef TARGET_X86_64
1025
    if (s->aflag == 2) {
1026
        gen_op_addq_ESI_T0();
1027
    } else 
1028
#endif
1029
    if (s->aflag) {
1030
        gen_op_addl_ESI_T0();
1031
    } else {
1032
        gen_op_addw_ESI_T0();
1033
    }
1034
}
1035

    
1036
static inline void gen_scas(DisasContext *s, int ot)
1037
{
1038
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
1039
    gen_string_movl_A0_EDI(s);
1040
    gen_op_ld_T1_A0[ot + s->mem_index]();
1041
    gen_op_cmpl_T0_T1_cc();
1042
    gen_op_movl_T0_Dshift[ot]();
1043
#ifdef TARGET_X86_64
1044
    if (s->aflag == 2) {
1045
        gen_op_addq_EDI_T0();
1046
    } else 
1047
#endif
1048
    if (s->aflag) {
1049
        gen_op_addl_EDI_T0();
1050
    } else {
1051
        gen_op_addw_EDI_T0();
1052
    }
1053
}
1054

    
1055
static inline void gen_cmps(DisasContext *s, int ot)
1056
{
1057
    gen_string_movl_A0_ESI(s);
1058
    gen_op_ld_T0_A0[ot + s->mem_index]();
1059
    gen_string_movl_A0_EDI(s);
1060
    gen_op_ld_T1_A0[ot + s->mem_index]();
1061
    gen_op_cmpl_T0_T1_cc();
1062
    gen_op_movl_T0_Dshift[ot]();
1063
#ifdef TARGET_X86_64
1064
    if (s->aflag == 2) {
1065
        gen_op_addq_ESI_T0();
1066
        gen_op_addq_EDI_T0();
1067
    } else 
1068
#endif
1069
    if (s->aflag) {
1070
        gen_op_addl_ESI_T0();
1071
        gen_op_addl_EDI_T0();
1072
    } else {
1073
        gen_op_addw_ESI_T0();
1074
        gen_op_addw_EDI_T0();
1075
    }
1076
}
1077

    
1078
static inline void gen_ins(DisasContext *s, int ot)
1079
{
1080
    gen_string_movl_A0_EDI(s);
1081
    gen_op_movl_T0_0();
1082
    gen_op_st_T0_A0[ot + s->mem_index]();
1083
    gen_op_in_DX_T0[ot]();
1084
    gen_op_st_T0_A0[ot + s->mem_index]();
1085
    gen_op_movl_T0_Dshift[ot]();
1086
#ifdef TARGET_X86_64
1087
    if (s->aflag == 2) {
1088
        gen_op_addq_EDI_T0();
1089
    } else 
1090
#endif
1091
    if (s->aflag) {
1092
        gen_op_addl_EDI_T0();
1093
    } else {
1094
        gen_op_addw_EDI_T0();
1095
    }
1096
}
1097

    
1098
static inline void gen_outs(DisasContext *s, int ot)
1099
{
1100
    gen_string_movl_A0_ESI(s);
1101
    gen_op_ld_T0_A0[ot + s->mem_index]();
1102
    gen_op_out_DX_T0[ot]();
1103
    gen_op_movl_T0_Dshift[ot]();
1104
#ifdef TARGET_X86_64
1105
    if (s->aflag == 2) {
1106
        gen_op_addq_ESI_T0();
1107
    } else 
1108
#endif
1109
    if (s->aflag) {
1110
        gen_op_addl_ESI_T0();
1111
    } else {
1112
        gen_op_addw_ESI_T0();
1113
    }
1114
}
1115

    
1116
/* same method as Valgrind: we generate jumps to the current or next
   instruction */
1118
#define GEN_REPZ(op)                                                          \
1119
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
1120
                                 target_ulong cur_eip, target_ulong next_eip) \
1121
{                                                                             \
1122
    int l2;\
1123
    gen_update_cc_op(s);                                                      \
1124
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
1125
    gen_ ## op(s, ot);                                                        \
1126
    gen_op_dec_ECX[s->aflag]();                                               \
1127
    /* a loop would cause two single step exceptions if ECX = 1               \
1128
       before rep string_insn */                                              \
1129
    if (!s->jmp_opt)                                                          \
1130
        gen_op_jz_ecx[s->aflag](l2);                                          \
1131
    gen_jmp(s, cur_eip);                                                      \
1132
}
1133

    
1134
#define GEN_REPZ2(op)                                                         \
1135
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
1136
                                   target_ulong cur_eip,                      \
1137
                                   target_ulong next_eip,                     \
1138
                                   int nz)                                    \
1139
{                                                                             \
1140
    int l2;\
1141
    gen_update_cc_op(s);                                                      \
1142
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
1143
    gen_ ## op(s, ot);                                                        \
1144
    gen_op_dec_ECX[s->aflag]();                                               \
1145
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
1146
    gen_op_string_jnz_sub[nz][ot](l2);\
1147
    if (!s->jmp_opt)                                                          \
1148
        gen_op_jz_ecx[s->aflag](l2);                                          \
1149
    gen_jmp(s, cur_eip);                                                      \
1150
}
1151

    
1152
GEN_REPZ(movs)
1153
GEN_REPZ(stos)
1154
GEN_REPZ(lods)
1155
GEN_REPZ(ins)
1156
GEN_REPZ(outs)
1157
GEN_REPZ2(scas)
1158
GEN_REPZ2(cmps)
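
/* Rough shape of the code generated for a "rep movs"-style instruction by
   the macros above (illustrative sketch, not literal output):

       if (ECX == 0) goto next_insn;    (gen_jz_ecx_string)
       <one iteration of the string op>
       ECX--;
       for REPZ2 ops (scas/cmps): exit to next_insn when ZF disagrees
       goto current_insn;               (re-execute the rep instruction)

   Looping by jumping back to the instruction itself, instead of emitting
   a closed loop, is what keeps single stepping and interrupt delivery
   between iterations correct. */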
1159

    
1160
enum {
1161
    JCC_O,
1162
    JCC_B,
1163
    JCC_Z,
1164
    JCC_BE,
1165
    JCC_S,
1166
    JCC_P,
1167
    JCC_L,
1168
    JCC_LE,
1169
};
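
/* Encoding note (illustrative): the eight conditions above are the even
   x86 condition codes; the low bit of the opcode's condition field
   inverts them.  Callers therefore decode a Jcc/SETcc opcode as

       jcc_op = (b >> 1) & 7;    (one of JCC_O .. JCC_LE)
       inv    = b & 1;           (e.g. JNZ = inverted JCC_Z)

   which is exactly what gen_jcc() and gen_setcc() below do. */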
1170

    
1171
static GenOpFunc1 *gen_jcc_sub[4][8] = {
1172
    [OT_BYTE] = {
1173
        NULL,
1174
        gen_op_jb_subb,
1175
        gen_op_jz_subb,
1176
        gen_op_jbe_subb,
1177
        gen_op_js_subb,
1178
        NULL,
1179
        gen_op_jl_subb,
1180
        gen_op_jle_subb,
1181
    },
1182
    [OT_WORD] = {
1183
        NULL,
1184
        gen_op_jb_subw,
1185
        gen_op_jz_subw,
1186
        gen_op_jbe_subw,
1187
        gen_op_js_subw,
1188
        NULL,
1189
        gen_op_jl_subw,
1190
        gen_op_jle_subw,
1191
    },
1192
    [OT_LONG] = {
1193
        NULL,
1194
        gen_op_jb_subl,
1195
        gen_op_jz_subl,
1196
        gen_op_jbe_subl,
1197
        gen_op_js_subl,
1198
        NULL,
1199
        gen_op_jl_subl,
1200
        gen_op_jle_subl,
1201
    },
1202
#ifdef TARGET_X86_64
1203
    [OT_QUAD] = {
1204
        NULL,
1205
        BUGGY_64(gen_op_jb_subq),
1206
        gen_op_jz_subq,
1207
        BUGGY_64(gen_op_jbe_subq),
1208
        gen_op_js_subq,
1209
        NULL,
1210
        BUGGY_64(gen_op_jl_subq),
1211
        BUGGY_64(gen_op_jle_subq),
1212
    },
1213
#endif
1214
};
1215
static GenOpFunc1 *gen_op_loop[3][4] = {
1216
    [0] = {
1217
        gen_op_loopnzw,
1218
        gen_op_loopzw,
1219
        gen_op_jnz_ecxw,
1220
    },
1221
    [1] = {
1222
        gen_op_loopnzl,
1223
        gen_op_loopzl,
1224
        gen_op_jnz_ecxl,
1225
    },
1226
#ifdef TARGET_X86_64
1227
    [2] = {
1228
        gen_op_loopnzq,
1229
        gen_op_loopzq,
1230
        gen_op_jnz_ecxq,
1231
    },
1232
#endif
1233
};
1234

    
1235
static GenOpFunc *gen_setcc_slow[8] = {
1236
    gen_op_seto_T0_cc,
1237
    gen_op_setb_T0_cc,
1238
    gen_op_setz_T0_cc,
1239
    gen_op_setbe_T0_cc,
1240
    gen_op_sets_T0_cc,
1241
    gen_op_setp_T0_cc,
1242
    gen_op_setl_T0_cc,
1243
    gen_op_setle_T0_cc,
1244
};
1245

    
1246
static GenOpFunc *gen_setcc_sub[4][8] = {
1247
    [OT_BYTE] = {
1248
        NULL,
1249
        gen_op_setb_T0_subb,
1250
        gen_op_setz_T0_subb,
1251
        gen_op_setbe_T0_subb,
1252
        gen_op_sets_T0_subb,
1253
        NULL,
1254
        gen_op_setl_T0_subb,
1255
        gen_op_setle_T0_subb,
1256
    },
1257
    [OT_WORD] = {
1258
        NULL,
1259
        gen_op_setb_T0_subw,
1260
        gen_op_setz_T0_subw,
1261
        gen_op_setbe_T0_subw,
1262
        gen_op_sets_T0_subw,
1263
        NULL,
1264
        gen_op_setl_T0_subw,
1265
        gen_op_setle_T0_subw,
1266
    },
1267
    [OT_LONG] = {
1268
        NULL,
1269
        gen_op_setb_T0_subl,
1270
        gen_op_setz_T0_subl,
1271
        gen_op_setbe_T0_subl,
1272
        gen_op_sets_T0_subl,
1273
        NULL,
1274
        gen_op_setl_T0_subl,
1275
        gen_op_setle_T0_subl,
1276
    },
1277
#ifdef TARGET_X86_64
1278
    [OT_QUAD] = {
1279
        NULL,
1280
        gen_op_setb_T0_subq,
1281
        gen_op_setz_T0_subq,
1282
        gen_op_setbe_T0_subq,
1283
        gen_op_sets_T0_subq,
1284
        NULL,
1285
        gen_op_setl_T0_subq,
1286
        gen_op_setle_T0_subq,
1287
    },
1288
#endif
1289
};
1290

    
1291
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
1292
    gen_op_fadd_ST0_FT0,
1293
    gen_op_fmul_ST0_FT0,
1294
    gen_op_fcom_ST0_FT0,
1295
    gen_op_fcom_ST0_FT0,
1296
    gen_op_fsub_ST0_FT0,
1297
    gen_op_fsubr_ST0_FT0,
1298
    gen_op_fdiv_ST0_FT0,
1299
    gen_op_fdivr_ST0_FT0,
1300
};
1301

    
1302
/* NOTE the exception in "r" op ordering */
1303
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
1304
    gen_op_fadd_STN_ST0,
1305
    gen_op_fmul_STN_ST0,
1306
    NULL,
1307
    NULL,
1308
    gen_op_fsubr_STN_ST0,
1309
    gen_op_fsub_STN_ST0,
1310
    gen_op_fdivr_STN_ST0,
1311
    gen_op_fdiv_STN_ST0,
1312
};
1313

    
1314
/* if d == OR_TMP0, it means memory operand (address in A0) */
1315
static void gen_op(DisasContext *s1, int op, int ot, int d)
1316
{
1317
    GenOpFunc *gen_update_cc;
1318
    
1319
    if (d != OR_TMP0) {
1320
        gen_op_mov_TN_reg[ot][0][d]();
1321
    } else {
1322
        gen_op_ld_T0_A0[ot + s1->mem_index]();
1323
    }
1324
    switch(op) {
1325
    case OP_ADCL:
1326
    case OP_SBBL:
1327
        if (s1->cc_op != CC_OP_DYNAMIC)
1328
            gen_op_set_cc_op(s1->cc_op);
1329
        if (d != OR_TMP0) {
1330
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
1331
            gen_op_mov_reg_T0[ot][d]();
1332
        } else {
1333
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
1334
        }
1335
        s1->cc_op = CC_OP_DYNAMIC;
1336
        goto the_end;
1337
    case OP_ADDL:
1338
        gen_op_addl_T0_T1();
1339
        s1->cc_op = CC_OP_ADDB + ot;
1340
        gen_update_cc = gen_op_update2_cc;
1341
        break;
1342
    case OP_SUBL:
1343
        gen_op_subl_T0_T1();
1344
        s1->cc_op = CC_OP_SUBB + ot;
1345
        gen_update_cc = gen_op_update2_cc;
1346
        break;
1347
    default:
1348
    case OP_ANDL:
1349
    case OP_ORL:
1350
    case OP_XORL:
1351
        gen_op_arith_T0_T1_cc[op]();
1352
        s1->cc_op = CC_OP_LOGICB + ot;
1353
        gen_update_cc = gen_op_update1_cc;
1354
        break;
1355
    case OP_CMPL:
1356
        gen_op_cmpl_T0_T1_cc();
1357
        s1->cc_op = CC_OP_SUBB + ot;
1358
        gen_update_cc = NULL;
1359
        break;
1360
    }
1361
    if (op != OP_CMPL) {
1362
        if (d != OR_TMP0)
1363
            gen_op_mov_reg_T0[ot][d]();
1364
        else
1365
            gen_op_st_T0_A0[ot + s1->mem_index]();
1366
    }
1367
    /* the flags update must happen after the memory write (precise
1368
       exception support) */
1369
    if (gen_update_cc)
1370
        gen_update_cc();
1371
 the_end: ;
1372
}
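
/* Lazy condition codes (illustrative note): instead of computing EFLAGS
   after every instruction, gen_op() only records which operation last
   touched the flags (s1->cc_op, e.g. CC_OP_ADDB + ot) and lets the
   generated ops keep the operands/result in CC_SRC/CC_DST.  The flags are
   materialized only when something actually reads them; for instance ZF
   for the add/sub/logic cases is simply (CC_DST == 0) within the operand
   width at that point. */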
1373

    
1374
/* if d == OR_TMP0, it means memory operand (address in A0) */
1375
static void gen_inc(DisasContext *s1, int ot, int d, int c)
1376
{
1377
    if (d != OR_TMP0)
1378
        gen_op_mov_TN_reg[ot][0][d]();
1379
    else
1380
        gen_op_ld_T0_A0[ot + s1->mem_index]();
1381
    if (s1->cc_op != CC_OP_DYNAMIC)
1382
        gen_op_set_cc_op(s1->cc_op);
1383
    if (c > 0) {
1384
        gen_op_incl_T0();
1385
        s1->cc_op = CC_OP_INCB + ot;
1386
    } else {
1387
        gen_op_decl_T0();
1388
        s1->cc_op = CC_OP_DECB + ot;
1389
    }
1390
    if (d != OR_TMP0)
1391
        gen_op_mov_reg_T0[ot][d]();
1392
    else
1393
        gen_op_st_T0_A0[ot + s1->mem_index]();
1394
    gen_op_update_inc_cc();
1395
}
1396

    
1397
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1398
{
1399
    if (d != OR_TMP0)
1400
        gen_op_mov_TN_reg[ot][0][d]();
1401
    else
1402
        gen_op_ld_T0_A0[ot + s1->mem_index]();
1403
    if (s != OR_TMP1)
1404
        gen_op_mov_TN_reg[ot][1][s]();
1405
    /* for zero counts, flags are not updated, so must do it dynamically */
1406
    if (s1->cc_op != CC_OP_DYNAMIC)
1407
        gen_op_set_cc_op(s1->cc_op);
1408
    
1409
    if (d != OR_TMP0)
1410
        gen_op_shift_T0_T1_cc[ot][op]();
1411
    else
1412
        gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
1413
    if (d != OR_TMP0)
1414
        gen_op_mov_reg_T0[ot][d]();
1415
    s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1416
}
1417

    
1418
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
1419
{
1420
    /* currently not optimized */
1421
    gen_op_movl_T1_im(c);
1422
    gen_shift(s1, op, ot, d, OR_TMP1);
1423
}
1424

    
1425
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1426
{
1427
    target_long disp;
1428
    int havesib;
1429
    int base;
1430
    int index;
1431
    int scale;
1432
    int opreg;
1433
    int mod, rm, code, override, must_add_seg;
1434

    
1435
    override = s->override;
1436
    must_add_seg = s->addseg;
1437
    if (override >= 0)
1438
        must_add_seg = 1;
1439
    mod = (modrm >> 6) & 3;
1440
    rm = modrm & 7;
1441

    
1442
    if (s->aflag) {
1443

    
1444
        havesib = 0;
1445
        base = rm;
1446
        index = 0;
1447
        scale = 0;
1448
        
1449
        if (base == 4) {
1450
            havesib = 1;
1451
            code = ldub_code(s->pc++);
1452
            scale = (code >> 6) & 3;
1453
            index = ((code >> 3) & 7) | REX_X(s);
1454
            base = (code & 7);
1455
        }
1456
        base |= REX_B(s);
1457

    
1458
        switch (mod) {
1459
        case 0:
1460
            if ((base & 7) == 5) {
1461
                base = -1;
1462
                disp = (int32_t)ldl_code(s->pc);
1463
                s->pc += 4;
1464
                if (CODE64(s) && !havesib) {
1465
                    disp += s->pc + s->rip_offset;
1466
                }
1467
            } else {
1468
                disp = 0;
1469
            }
1470
            break;
1471
        case 1:
1472
            disp = (int8_t)ldub_code(s->pc++);
1473
            break;
1474
        default:
1475
        case 2:
1476
            disp = ldl_code(s->pc);
1477
            s->pc += 4;
1478
            break;
1479
        }
1480
        
1481
        if (base >= 0) {
1482
            /* for correct popl handling with esp */
1483
            if (base == 4 && s->popl_esp_hack)
1484
                disp += s->popl_esp_hack;
1485
#ifdef TARGET_X86_64
1486
            if (s->aflag == 2) {
1487
                gen_op_movq_A0_reg[base]();
1488
                if (disp != 0) {
1489
                    if ((int32_t)disp == disp)
1490
                        gen_op_addq_A0_im(disp);
1491
                    else
1492
                        gen_op_addq_A0_im64(disp >> 32, disp);
1493
                }
1494
            } else 
1495
#endif
1496
            {
1497
                gen_op_movl_A0_reg[base]();
1498
                if (disp != 0)
1499
                    gen_op_addl_A0_im(disp);
1500
            }
1501
        } else {
1502
#ifdef TARGET_X86_64
1503
            if (s->aflag == 2) {
1504
                if ((int32_t)disp == disp)
1505
                    gen_op_movq_A0_im(disp);
1506
                else
1507
                    gen_op_movq_A0_im64(disp >> 32, disp);
1508
            } else 
1509
#endif
1510
            {
1511
                gen_op_movl_A0_im(disp);
1512
            }
1513
        }
1514
        /* XXX: index == 4 is always invalid */
1515
        if (havesib && (index != 4 || scale != 0)) {
1516
#ifdef TARGET_X86_64
1517
            if (s->aflag == 2) {
1518
                gen_op_addq_A0_reg_sN[scale][index]();
1519
            } else 
1520
#endif
1521
            {
1522
                gen_op_addl_A0_reg_sN[scale][index]();
1523
            }
1524
        }
1525
        if (must_add_seg) {
1526
            if (override < 0) {
1527
                if (base == R_EBP || base == R_ESP)
1528
                    override = R_SS;
1529
                else
1530
                    override = R_DS;
1531
            }
1532
#ifdef TARGET_X86_64
1533
            if (s->aflag == 2) {
1534
                gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
1535
            } else 
1536
#endif
1537
            {
1538
                gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1539
            }
1540
        }
1541
    } else {
1542
        switch (mod) {
1543
        case 0:
1544
            if (rm == 6) {
1545
                disp = lduw_code(s->pc);
1546
                s->pc += 2;
1547
                gen_op_movl_A0_im(disp);
1548
                rm = 0; /* avoid SS override */
1549
                goto no_rm;
1550
            } else {
1551
                disp = 0;
1552
            }
1553
            break;
1554
        case 1:
1555
            disp = (int8_t)ldub_code(s->pc++);
1556
            break;
1557
        default:
1558
        case 2:
1559
            disp = lduw_code(s->pc);
1560
            s->pc += 2;
1561
            break;
1562
        }
1563
        switch(rm) {
1564
        case 0:
1565
            gen_op_movl_A0_reg[R_EBX]();
1566
            gen_op_addl_A0_reg_sN[0][R_ESI]();
1567
            break;
1568
        case 1:
1569
            gen_op_movl_A0_reg[R_EBX]();
1570
            gen_op_addl_A0_reg_sN[0][R_EDI]();
1571
            break;
1572
        case 2:
1573
            gen_op_movl_A0_reg[R_EBP]();
1574
            gen_op_addl_A0_reg_sN[0][R_ESI]();
1575
            break;
1576
        case 3:
1577
            gen_op_movl_A0_reg[R_EBP]();
1578
            gen_op_addl_A0_reg_sN[0][R_EDI]();
1579
            break;
1580
        case 4:
1581
            gen_op_movl_A0_reg[R_ESI]();
1582
            break;
1583
        case 5:
1584
            gen_op_movl_A0_reg[R_EDI]();
1585
            break;
1586
        case 6:
1587
            gen_op_movl_A0_reg[R_EBP]();
1588
            break;
1589
        default:
1590
        case 7:
1591
            gen_op_movl_A0_reg[R_EBX]();
1592
            break;
1593
        }
1594
        if (disp != 0)
1595
            gen_op_addl_A0_im(disp);
1596
        gen_op_andl_A0_ffff();
1597
    no_rm:
1598
        if (must_add_seg) {
1599
            if (override < 0) {
1600
                if (rm == 2 || rm == 3 || rm == 6)
1601
                    override = R_SS;
1602
                else
1603
                    override = R_DS;
1604
            }
1605
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1606
        }
1607
    }
1608

    
1609
    opreg = OR_A0;
1610
    disp = 0;
1611
    *reg_ptr = opreg;
1612
    *offset_ptr = disp;
1613
}
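
/* Worked example for gen_lea_modrm() (illustrative): a 32 bit mod r/m
   byte of 0x84 splits as

       mod = (0x84 >> 6) & 3 = 2    -> a 32 bit displacement follows
       rm  =  0x84       & 7 = 4    -> an SIB byte follows

   so for e.g. "mov eax, [ebx + esi*4 + 0x1000]" the code above reads the
   SIB byte (base = EBX, index = ESI, scale = 2), adds the displacement,
   and finally adds the segment base if addseg or an override requires it,
   leaving the effective address in A0. */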
1614

    
1615
/* used for LEA and MOV AX, mem */
1616
static void gen_add_A0_ds_seg(DisasContext *s)
1617
{
1618
    int override, must_add_seg;
1619
    must_add_seg = s->addseg;
1620
    override = R_DS;
1621
    if (s->override >= 0) {
1622
        override = s->override;
1623
        must_add_seg = 1;
1624
    } else {
1625
        override = R_DS;
1626
    }
1627
    if (must_add_seg) {
1628
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1629
    }
1630
}
1631

    
1632
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1633
   OR_TMP0 */
1634
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1635
{
1636
    int mod, rm, opreg, disp;
1637

    
1638
    mod = (modrm >> 6) & 3;
1639
    rm = (modrm & 7) | REX_B(s);
1640
    if (mod == 3) {
1641
        if (is_store) {
1642
            if (reg != OR_TMP0)
1643
                gen_op_mov_TN_reg[ot][0][reg]();
1644
            gen_op_mov_reg_T0[ot][rm]();
1645
        } else {
1646
            gen_op_mov_TN_reg[ot][0][rm]();
1647
            if (reg != OR_TMP0)
1648
                gen_op_mov_reg_T0[ot][reg]();
1649
        }
1650
    } else {
1651
        gen_lea_modrm(s, modrm, &opreg, &disp);
1652
        if (is_store) {
1653
            if (reg != OR_TMP0)
1654
                gen_op_mov_TN_reg[ot][0][reg]();
1655
            gen_op_st_T0_A0[ot + s->mem_index]();
1656
        } else {
1657
            gen_op_ld_T0_A0[ot + s->mem_index]();
1658
            if (reg != OR_TMP0)
1659
                gen_op_mov_reg_T0[ot][reg]();
1660
        }
1661
    }
1662
}
1663

    
1664
static inline uint32_t insn_get(DisasContext *s, int ot)
1665
{
1666
    uint32_t ret;
1667

    
1668
    switch(ot) {
1669
    case OT_BYTE:
1670
        ret = ldub_code(s->pc);
1671
        s->pc++;
1672
        break;
1673
    case OT_WORD:
1674
        ret = lduw_code(s->pc);
1675
        s->pc += 2;
1676
        break;
1677
    default:
1678
    case OT_LONG:
1679
        ret = ldl_code(s->pc);
1680
        s->pc += 4;
1681
        break;
1682
    }
1683
    return ret;
1684
}
1685

    
1686
static inline int insn_const_size(unsigned int ot)
1687
{
1688
    if (ot <= OT_LONG)
1689
        return 1 << ot;
1690
    else
1691
        return 4;
1692
}
1693

    
1694
static inline void gen_jcc(DisasContext *s, int b, 
1695
                           target_ulong val, target_ulong next_eip)
1696
{
1697
    TranslationBlock *tb;
1698
    int inv, jcc_op;
1699
    GenOpFunc1 *func;
1700
    target_ulong tmp;
1701
    int l1, l2;
1702

    
1703
    inv = b & 1;
1704
    jcc_op = (b >> 1) & 7;
1705
    
1706
    if (s->jmp_opt) {
1707
        switch(s->cc_op) {
1708
            /* we optimize the cmp/jcc case */
1709
        case CC_OP_SUBB:
1710
        case CC_OP_SUBW:
1711
        case CC_OP_SUBL:
1712
        case CC_OP_SUBQ:
1713
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1714
            break;
1715
            
1716
            /* some jumps are easy to compute */
1717
        case CC_OP_ADDB:
1718
        case CC_OP_ADDW:
1719
        case CC_OP_ADDL:
1720
        case CC_OP_ADDQ:
1721

    
1722
        case CC_OP_ADCB:
1723
        case CC_OP_ADCW:
1724
        case CC_OP_ADCL:
1725
        case CC_OP_ADCQ:
1726

    
1727
        case CC_OP_SBBB:
1728
        case CC_OP_SBBW:
1729
        case CC_OP_SBBL:
1730
        case CC_OP_SBBQ:
1731

    
1732
        case CC_OP_LOGICB:
1733
        case CC_OP_LOGICW:
1734
        case CC_OP_LOGICL:
1735
        case CC_OP_LOGICQ:
1736

    
1737
        case CC_OP_INCB:
1738
        case CC_OP_INCW:
1739
        case CC_OP_INCL:
1740
        case CC_OP_INCQ:
1741

    
1742
        case CC_OP_DECB:
1743
        case CC_OP_DECW:
1744
        case CC_OP_DECL:
1745
        case CC_OP_DECQ:
1746

    
1747
        case CC_OP_SHLB:
1748
        case CC_OP_SHLW:
1749
        case CC_OP_SHLL:
1750
        case CC_OP_SHLQ:
1751

    
1752
        case CC_OP_SARB:
1753
        case CC_OP_SARW:
1754
        case CC_OP_SARL:
1755
        case CC_OP_SARQ:
1756
            switch(jcc_op) {
1757
            case JCC_Z:
1758
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1759
                break;
1760
            case JCC_S:
1761
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1762
                break;
1763
            default:
1764
                func = NULL;
1765
                break;
1766
            }
1767
            break;
1768
        default:
1769
            func = NULL;
1770
            break;
1771
        }
1772

    
1773
        if (s->cc_op != CC_OP_DYNAMIC)
1774
            gen_op_set_cc_op(s->cc_op);
1775

    
1776
        if (!func) {
1777
            gen_setcc_slow[jcc_op]();
1778
            func = gen_op_jnz_T0_label;
1779
        }
1780
    
1781
        if (inv) {
1782
            tmp = val;
1783
            val = next_eip;
1784
            next_eip = tmp;
1785
        }
1786
        tb = s->tb;
1787

    
1788
        l1 = gen_new_label();
1789
        func(l1);
1790

    
1791
        gen_op_goto_tb0(TBPARAM(tb));
1792
        gen_jmp_im(next_eip);
1793
        gen_op_movl_T0_im((long)tb + 0);
1794
        gen_op_exit_tb();
1795

    
1796
        gen_set_label(l1);
1797
        gen_op_goto_tb1(TBPARAM(tb));
1798
        gen_jmp_im(val);
1799
        gen_op_movl_T0_im((long)tb + 1);
1800
        gen_op_exit_tb();
1801

    
1802
        s->is_jmp = 3;
1803
    } else {
1804

    
1805
        if (s->cc_op != CC_OP_DYNAMIC) {
1806
            gen_op_set_cc_op(s->cc_op);
1807
            s->cc_op = CC_OP_DYNAMIC;
1808
        }
1809
        gen_setcc_slow[jcc_op]();
1810
        if (inv) {
1811
            tmp = val;
1812
            val = next_eip;
1813
            next_eip = tmp;
1814
        }
1815
        l1 = gen_new_label();
1816
        l2 = gen_new_label();
1817
        gen_op_jnz_T0_label(l1);
1818
        gen_jmp_im(next_eip);
1819
        gen_op_jmp_label(l2);
1820
        gen_set_label(l1);
1821
        gen_jmp_im(val);
1822
        gen_set_label(l2);
1823
        gen_eob(s);
1824
    }
1825
}
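
/* Direct block chaining (illustrative note): when jmp_opt is set, the two
   goto_tb ops above emit jumps whose targets initially fall through to the
   following exit_tb sequence; exit_tb returns (long)tb + n to the
   execution loop, which can then translate the destination and patch the
   jump so that later executions flow directly from one translated block
   to the other.  The slow path at the end simply stores the chosen EIP
   and ends the block. */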
1826

    
1827
static void gen_setcc(DisasContext *s, int b)
1828
{
1829
    int inv, jcc_op;
1830
    GenOpFunc *func;
1831

    
1832
    inv = b & 1;
1833
    jcc_op = (b >> 1) & 7;
1834
    switch(s->cc_op) {
1835
        /* we optimize the cmp/jcc case */
1836
    case CC_OP_SUBB:
1837
    case CC_OP_SUBW:
1838
    case CC_OP_SUBL:
1839
    case CC_OP_SUBQ:
1840
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
1841
        if (!func)
1842
            goto slow_jcc;
1843
        break;
1844
        
1845
        /* some jumps are easy to compute */
1846
    case CC_OP_ADDB:
1847
    case CC_OP_ADDW:
1848
    case CC_OP_ADDL:
1849
    case CC_OP_ADDQ:
1850

    
1851
    case CC_OP_LOGICB:
1852
    case CC_OP_LOGICW:
1853
    case CC_OP_LOGICL:
1854
    case CC_OP_LOGICQ:
1855

    
1856
    case CC_OP_INCB:
1857
    case CC_OP_INCW:
1858
    case CC_OP_INCL:
1859
    case CC_OP_INCQ:
1860

    
1861
    case CC_OP_DECB:
1862
    case CC_OP_DECW:
1863
    case CC_OP_DECL:
1864
    case CC_OP_DECQ:
1865

    
1866
    case CC_OP_SHLB:
1867
    case CC_OP_SHLW:
1868
    case CC_OP_SHLL:
1869
    case CC_OP_SHLQ:
1870
        switch(jcc_op) {
1871
        case JCC_Z:
1872
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1873
            break;
1874
        case JCC_S:
1875
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
1876
            break;
1877
        default:
1878
            goto slow_jcc;
1879
        }
1880
        break;
1881
    default:
1882
    slow_jcc:
1883
        if (s->cc_op != CC_OP_DYNAMIC)
1884
            gen_op_set_cc_op(s->cc_op);
1885
        func = gen_setcc_slow[jcc_op];
1886
        break;
1887
    }
1888
    func();
1889
    if (inv) {
1890
        gen_op_xor_T0_1();
1891
    }
1892
}
1893

    
1894
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}

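/* add a constant to ESP/RSP using the width implied by the current
   stack mode (64 bit, ss32 or 16 bit) */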
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        if (addend == 8)
            gen_op_addq_ESP_8();
        else
            gen_op_addq_ESP_im(addend);
    } else
#endif
    if (s->ss32) {
        if (addend == 2)
            gen_op_addl_ESP_2();
        else if (addend == 4)
            gen_op_addl_ESP_4();
        else
            gen_op_addl_ESP_im(addend);
    } else {
        if (addend == 2)
            gen_op_addw_ESP_2();
        else if (addend == 4)
            gen_op_addw_ESP_4();
        else
            gen_op_addw_ESP_im(addend);
    }
}

/* generate a push. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        /* XXX: check 16 bit behaviour */
        gen_op_movq_A0_reg[R_ESP]();
        gen_op_subq_A0_8();
        gen_op_st_T0_A0[OT_QUAD + s->mem_index]();
        gen_op_movq_ESP_A0();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (!s->dflag)
            gen_op_subl_A0_2();
        else
            gen_op_subl_A0_4();
        if (s->ss32) {
            if (s->addseg) {
                gen_op_movl_T1_A0();
                gen_op_addl_A0_SS();
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_SS();
        }
        gen_op_st_T0_A0[s->dflag + 1 + s->mem_index]();
        if (s->ss32 && !s->addseg)
            gen_op_movl_ESP_A0();
        else
            gen_op_mov_reg_T1[s->ss32 + 1][R_ESP]();
    }
}

/* generate a push. It depends on ss32, addseg and dflag */
/* slower version for T1, only used for call Ev */
static void gen_push_T1(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        /* XXX: check 16 bit behaviour */
        gen_op_movq_A0_reg[R_ESP]();
        gen_op_subq_A0_8();
        gen_op_st_T1_A0[OT_QUAD + s->mem_index]();
        gen_op_movq_ESP_A0();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (!s->dflag)
            gen_op_subl_A0_2();
        else
            gen_op_subl_A0_4();
        if (s->ss32) {
            if (s->addseg) {
                gen_op_addl_A0_SS();
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_SS();
        }
        gen_op_st_T1_A0[s->dflag + 1 + s->mem_index]();

        if (s->ss32 && !s->addseg)
            gen_op_movl_ESP_A0();
        else
            gen_stack_update(s, (-2) << s->dflag);
    }
}

/* two step pop is necessary for precise exceptions */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        /* XXX: check 16 bit behaviour */
        gen_op_movq_A0_reg[R_ESP]();
        gen_op_ld_T0_A0[OT_QUAD + s->mem_index]();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_SS();
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_SS();
        }
        gen_op_ld_T0_A0[s->dflag + 1 + s->mem_index]();
    }
}

static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}

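/* compute the SS-relative stack address in A0; the raw (unsegmented)
   value is kept in T1 for a later ESP update */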
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
}

/* NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    gen_op_addl_A0_im(-16 <<  s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
        gen_op_st_T0_A0[OT_WORD + s->dflag + s->mem_index]();
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->dflag][R_ESP]();
}

/* NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    gen_op_addl_T1_im(16 <<  s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0[OT_WORD + s->dflag + s->mem_index]();
            gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
        }
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->dflag][R_ESP]();
}

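/* generate ENTER: push EBP, let gen_op_enter_level copy the nested
   frame pointers, then install the new EBP and reserve 'esp_addend'
   bytes of locals */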
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    ot = s->dflag + OT_WORD;
    level &= 0x1f;
    opsize = 2 << s->dflag;

    gen_op_movl_A0_ESP();
    gen_op_addl_A0_im(-opsize);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    /* push bp */
    gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
    gen_op_st_T0_A0[ot + s->mem_index]();
    if (level) {
        gen_op_enter_level(level, s->dflag);
    }
    gen_op_mov_reg_T1[ot][R_EBP]();
    gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
    gen_op_mov_reg_T1[ot][R_ESP]();
}

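/* raise exception 'trapno' at 'cur_eip': the condition codes are
   flushed and EIP reloaded first so the exception sees a consistent
   CPU state */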
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;
}

/* an interrupt is different from an exception because of the
   privilege checks */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3;
}

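/* flush the condition codes, reload EIP and raise a debug exception
   (used for breakpoints) */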
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;
}

/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        gen_op_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        gen_op_debug();
    } else if (s->tf) {
        gen_op_raise_exception(EXCP01_SSTP);
    } else {
        gen_op_movl_T0_0();
        gen_op_exit_tb();
    }
    s->is_jmp = 3;
}

/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    TranslationBlock *tb = s->tb;

    if (s->jmp_opt) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        if (tb_num)
            gen_op_goto_tb1(TBPARAM(tb));
        else
            gen_op_goto_tb0(TBPARAM(tb));
        gen_jmp_im(eip);
        gen_op_movl_T0_im((long)tb + tb_num);
        gen_op_exit_tb();
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}

static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}

static void gen_movtl_T0_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val == val) {
        gen_op_movl_T0_im(val);
    } else {
        gen_op_movq_T0_im64(val >> 32, val);
    }
#else
    gen_op_movl_T0_im(val);
#endif
}

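/* 64 bit and 128 bit env load/store helpers, indexed by
   s->mem_index >> 2 (raw, kernel or user access) */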
static GenOpFunc1 *gen_ldq_env_A0[3] = {
    gen_op_ldq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldq_kernel_env_A0,
    gen_op_ldq_user_env_A0,
#endif
};

static GenOpFunc1 *gen_stq_env_A0[3] = {
    gen_op_stq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_stq_kernel_env_A0,
    gen_op_stq_user_env_A0,
#endif
};

static GenOpFunc1 *gen_ldo_env_A0[3] = {
    gen_op_ldo_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldo_kernel_env_A0,
    gen_op_ldo_user_env_A0,
#endif
};

static GenOpFunc1 *gen_sto_env_A0[3] = {
    gen_op_sto_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_sto_kernel_env_A0,
    gen_op_sto_user_env_A0,
#endif
};

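/* sse_op_table1 is indexed by the second opcode byte and by the
   mandatory prefix (none, 0x66, 0xf3, 0xf2); SSE_SPECIAL marks entries
   that need dedicated decoding in gen_sse() */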
#define SSE_SPECIAL ((GenOpFunc2 *)1)

#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }

static GenOpFunc2 *sse_op_table1[256][4] = {
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
    [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttss2si, cvttsd2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtss2si, cvtsd2si */
    [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
    [0x2f] = { gen_op_comiss, gen_op_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
    [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
    [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
    [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
    [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
               gen_op_cvtss2sd, gen_op_cvtsd2ss },
    [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
    [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
               (GenOpFunc2 *)gen_op_pshufd_xmm,
               (GenOpFunc2 *)gen_op_pshufhw_xmm,
               (GenOpFunc2 *)gen_op_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_SPECIAL }, /* emms */
    [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
    [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu (PNI) */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};

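/* immediate-count shift group (0x71/0x72/0x73): indexed by
   8 * size group (w/d/q) + the modrm /r field, then by MMX vs SSE */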
static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, gen_op_psrldq_xmm },
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, gen_op_pslldq_xmm },
};

static GenOpFunc1 *sse_op_table3[4 * 3] = {
    gen_op_cvtsi2ss,
    gen_op_cvtsi2sd,
    X86_64_ONLY(gen_op_cvtsq2ss),
    X86_64_ONLY(gen_op_cvtsq2sd),

    gen_op_cvttss2si,
    gen_op_cvttsd2si,
    X86_64_ONLY(gen_op_cvttss2sq),
    X86_64_ONLY(gen_op_cvttsd2sq),

    gen_op_cvtss2si,
    gen_op_cvtsd2si,
    X86_64_ONLY(gen_op_cvtss2sq),
    X86_64_ONLY(gen_op_cvtsd2sq),
};

static GenOpFunc2 *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};

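/* translate one MMX/SSE instruction: the mandatory prefix selects the
   column of sse_op_table1 and SSE_SPECIAL entries are decoded case by
   case below */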
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2422
{
2423
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
2424
    int modrm, mod, rm, reg, reg_addr, offset_addr;
2425
    GenOpFunc2 *sse_op2;
2426
    GenOpFunc3 *sse_op3;
2427

    
2428
    b &= 0xff;
2429
    if (s->prefix & PREFIX_DATA) 
2430
        b1 = 1;
2431
    else if (s->prefix & PREFIX_REPZ) 
2432
        b1 = 2;
2433
    else if (s->prefix & PREFIX_REPNZ) 
2434
        b1 = 3;
2435
    else
2436
        b1 = 0;
2437
    sse_op2 = sse_op_table1[b][b1];
2438
    if (!sse_op2) 
2439
        goto illegal_op;
2440
    if (b <= 0x5f || b == 0xc6 || b == 0xc2) {
2441
        is_xmm = 1;
2442
    } else {
2443
        if (b1 == 0) {
2444
            /* MMX case */
2445
            is_xmm = 0;
2446
        } else {
2447
            is_xmm = 1;
2448
        }
2449
    }
2450
    /* simple MMX/SSE operation */
2451
    if (s->flags & HF_TS_MASK) {
2452
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2453
        return;
2454
    }
2455
    if (s->flags & HF_EM_MASK) {
2456
    illegal_op:
2457
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2458
        return;
2459
    }
2460
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2461
        goto illegal_op;
2462
    if (b == 0x77) {
2463
        /* emms */
2464
        gen_op_emms();
2465
        return;
2466
    }
2467
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2468
       the static cpu state) */
2469
    if (!is_xmm) {
2470
        gen_op_enter_mmx();
2471
    }
2472

    
2473
    modrm = ldub_code(s->pc++);
2474
    reg = ((modrm >> 3) & 7);
2475
    if (is_xmm)
2476
        reg |= rex_r;
2477
    mod = (modrm >> 6) & 3;
2478
    if (sse_op2 == SSE_SPECIAL) {
2479
        b |= (b1 << 8);
2480
        switch(b) {
2481
        case 0x0e7: /* movntq */
2482
            if (mod == 3) 
2483
                goto illegal_op;
2484
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2485
            gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2486
            break;
2487
        case 0x1e7: /* movntdq */
2488
        case 0x02b: /* movntps */
2489
        case 0x12b: /* movntpd */
2490
        case 0x2f0: /* lddqu */
2491
            if (mod == 3) 
2492
                goto illegal_op;
2493
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2494
            gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2495
            break;
2496
        case 0x6e: /* movd mm, ea */
2497
            gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2498
            gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2499
            break;
2500
        case 0x16e: /* movd xmm, ea */
2501
            gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2502
            gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2503
            break;
2504
        case 0x6f: /* movq mm, ea */
2505
            if (mod != 3) {
2506
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2507
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2508
            } else {
2509
                rm = (modrm & 7);
2510
                gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
2511
                            offsetof(CPUX86State,fpregs[rm].mmx));
2512
            }
2513
            break;
2514
        case 0x010: /* movups */
2515
        case 0x110: /* movupd */
2516
        case 0x028: /* movaps */
2517
        case 0x128: /* movapd */
2518
        case 0x16f: /* movdqa xmm, ea */
2519
        case 0x26f: /* movdqu xmm, ea */
2520
            if (mod != 3) {
2521
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2522
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2523
            } else {
2524
                rm = (modrm & 7) | REX_B(s);
2525
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2526
                            offsetof(CPUX86State,xmm_regs[rm]));
2527
            }
2528
            break;
2529
        case 0x210: /* movss xmm, ea */
2530
            if (mod != 3) {
2531
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2532
                gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2533
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2534
                gen_op_movl_T0_0();
2535
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2536
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2537
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2538
            } else {
2539
                rm = (modrm & 7) | REX_B(s);
2540
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2541
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2542
            }
2543
            break;
2544
        case 0x310: /* movsd xmm, ea */
2545
            if (mod != 3) {
2546
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2547
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2548
                gen_op_movl_T0_0();
2549
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2550
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2551
            } else {
2552
                rm = (modrm & 7) | REX_B(s);
2553
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2554
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2555
            }
2556
            break;
2557
        case 0x012: /* movlps */
2558
        case 0x112: /* movlpd */
2559
            if (mod != 3) {
2560
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2561
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2562
            } else {
2563
                /* movhlps */
2564
                rm = (modrm & 7) | REX_B(s);
2565
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2566
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2567
            }
2568
            break;
2569
        case 0x016: /* movhps */
2570
        case 0x116: /* movhpd */
2571
            if (mod != 3) {
2572
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2573
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2574
            } else {
2575
                /* movlhps */
2576
                rm = (modrm & 7) | REX_B(s);
2577
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2578
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2579
            }
2580
            break;
2581
        case 0x216: /* movshdup */
2582
            if (mod != 3) {
2583
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2584
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2585
            } else {
2586
                rm = (modrm & 7) | REX_B(s);
2587
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2588
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
2589
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2590
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
2591
            }
2592
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2593
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2594
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2595
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2596
            break;
2597
        case 0x7e: /* movd ea, mm */
2598
            gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2599
            gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2600
            break;
2601
        case 0x17e: /* movd ea, xmm */
2602
            gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2603
            gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2604
            break;
2605
        case 0x27e: /* movq xmm, ea */
2606
            if (mod != 3) {
2607
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2608
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2609
            } else {
2610
                rm = (modrm & 7) | REX_B(s);
2611
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2612
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2613
            }
2614
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2615
            break;
2616
        case 0x7f: /* movq ea, mm */
2617
            if (mod != 3) {
2618
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2619
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2620
            } else {
2621
                rm = (modrm & 7);
2622
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2623
                            offsetof(CPUX86State,fpregs[reg].mmx));
2624
            }
2625
            break;
2626
        case 0x011: /* movups */
2627
        case 0x111: /* movupd */
2628
        case 0x029: /* movaps */
2629
        case 0x129: /* movapd */
2630
        case 0x17f: /* movdqa ea, xmm */
2631
        case 0x27f: /* movdqu ea, xmm */
2632
            if (mod != 3) {
2633
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2634
                gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2635
            } else {
2636
                rm = (modrm & 7) | REX_B(s);
2637
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
2638
                            offsetof(CPUX86State,xmm_regs[reg]));
2639
            }
2640
            break;
2641
        case 0x211: /* movss ea, xmm */
2642
            if (mod != 3) {
2643
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2644
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2645
                gen_op_st_T0_A0[OT_LONG + s->mem_index]();
2646
            } else {
2647
                rm = (modrm & 7) | REX_B(s);
2648
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
2649
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2650
            }
2651
            break;
2652
        case 0x311: /* movsd ea, xmm */
2653
            if (mod != 3) {
2654
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2655
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2656
            } else {
2657
                rm = (modrm & 7) | REX_B(s);
2658
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2659
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2660
            }
2661
            break;
2662
        case 0x013: /* movlps */
2663
        case 0x113: /* movlpd */
2664
            if (mod != 3) {
2665
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2666
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2667
            } else {
2668
                goto illegal_op;
2669
            }
2670
            break;
2671
        case 0x017: /* movhps */
2672
        case 0x117: /* movhpd */
2673
            if (mod != 3) {
2674
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2675
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2676
            } else {
2677
                goto illegal_op;
2678
            }
2679
            break;
2680
        case 0x71: /* shift mm, im */
2681
        case 0x72:
2682
        case 0x73:
2683
        case 0x171: /* shift xmm, im */
2684
        case 0x172:
2685
        case 0x173:
2686
            val = ldub_code(s->pc++);
2687
            if (is_xmm) {
2688
                gen_op_movl_T0_im(val);
2689
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2690
                gen_op_movl_T0_0();
2691
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
2692
                op1_offset = offsetof(CPUX86State,xmm_t0);
2693
            } else {
2694
                gen_op_movl_T0_im(val);
2695
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
2696
                gen_op_movl_T0_0();
2697
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
2698
                op1_offset = offsetof(CPUX86State,mmx_t0);
2699
            }
2700
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
2701
            if (!sse_op2)
2702
                goto illegal_op;
2703
            if (is_xmm) {
2704
                rm = (modrm & 7) | REX_B(s);
2705
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2706
            } else {
2707
                rm = (modrm & 7);
2708
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2709
            }
2710
            sse_op2(op2_offset, op1_offset);
2711
            break;
2712
        case 0x050: /* movmskps */
2713
            gen_op_movmskps(offsetof(CPUX86State,xmm_regs[reg]));
2714
            rm = (modrm & 7) | REX_B(s);
2715
            gen_op_mov_reg_T0[OT_LONG][rm]();
2716
            break;
2717
        case 0x150: /* movmskpd */
2718
            gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[reg]));
2719
            rm = (modrm & 7) | REX_B(s);
2720
            gen_op_mov_reg_T0[OT_LONG][rm]();
2721
            break;
2722
        case 0x02a: /* cvtpi2ps */
2723
        case 0x12a: /* cvtpi2pd */
2724
            gen_op_enter_mmx();
2725
            if (mod != 3) {
2726
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2727
                op2_offset = offsetof(CPUX86State,mmx_t0);
2728
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
2729
            } else {
2730
                rm = (modrm & 7);
2731
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2732
            }
2733
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2734
            switch(b >> 8) {
2735
            case 0x0:
2736
                gen_op_cvtpi2ps(op1_offset, op2_offset);
2737
                break;
2738
            default:
2739
            case 0x1:
2740
                gen_op_cvtpi2pd(op1_offset, op2_offset);
2741
                break;
2742
            }
2743
            break;
2744
        case 0x22a: /* cvtsi2ss */
2745
        case 0x32a: /* cvtsi2sd */
2746
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
2747
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
2748
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2749
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
2750
            break;
2751
        case 0x02c: /* cvttps2pi */
2752
        case 0x12c: /* cvttpd2pi */
2753
        case 0x02d: /* cvtps2pi */
2754
        case 0x12d: /* cvtpd2pi */
2755
            gen_op_enter_mmx();
2756
            if (mod != 3) {
2757
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2758
                op2_offset = offsetof(CPUX86State,xmm_t0);
2759
                gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
2760
            } else {
2761
                rm = (modrm & 7) | REX_B(s);
2762
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2763
            }
2764
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
2765
            switch(b) {
2766
            case 0x02c:
2767
                gen_op_cvttps2pi(op1_offset, op2_offset);
2768
                break;
2769
            case 0x12c:
2770
                gen_op_cvttpd2pi(op1_offset, op2_offset);
2771
                break;
2772
            case 0x02d:
2773
                gen_op_cvtps2pi(op1_offset, op2_offset);
2774
                break;
2775
            case 0x12d:
2776
                gen_op_cvtpd2pi(op1_offset, op2_offset);
2777
                break;
2778
            }
2779
            break;
2780
        case 0x22c: /* cvttss2si */
2781
        case 0x32c: /* cvttsd2si */
2782
        case 0x22d: /* cvtss2si */
2783
        case 0x32d: /* cvtsd2si */
2784
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
2785
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2786
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 + 
2787
                          (b & 1) * 4](op1_offset);
2788
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
2789
            break;
2790
        case 0xc4: /* pinsrw */
2791
        case 0x1c4: 
2792
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
2793
            val = ldub_code(s->pc++);
2794
            if (b1) {
2795
                val &= 7;
2796
                gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
2797
            } else {
2798
                val &= 3;
2799
                gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
2800
            }
2801
            break;
2802
        case 0xc5: /* pextrw */
2803
        case 0x1c5: 
2804
            if (mod != 3)
2805
                goto illegal_op;
2806
            val = ldub_code(s->pc++);
2807
            if (b1) {
2808
                val &= 7;
2809
                rm = (modrm & 7) | REX_B(s);
2810
                gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
2811
            } else {
2812
                val &= 3;
2813
                rm = (modrm & 7);
2814
                gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
2815
            }
2816
            reg = ((modrm >> 3) & 7) | rex_r;
2817
            gen_op_mov_reg_T0[OT_LONG][reg]();
2818
            break;
2819
        case 0x1d6: /* movq ea, xmm */
2820
            if (mod != 3) {
2821
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2822
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2823
            } else {
2824
                rm = (modrm & 7) | REX_B(s);
2825
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2826
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2827
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2828
            }
2829
            break;
2830
        case 0x2d6: /* movq2dq */
2831
            gen_op_enter_mmx();
2832
            rm = (modrm & 7) | REX_B(s);
2833
            gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2834
                        offsetof(CPUX86State,fpregs[reg & 7].mmx));
2835
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2836
            break;
2837
        case 0x3d6: /* movdq2q */
2838
            gen_op_enter_mmx();
2839
            rm = (modrm & 7);
2840
            gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2841
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2842
            break;
2843
        case 0xd7: /* pmovmskb */
2844
        case 0x1d7:
2845
            if (mod != 3)
2846
                goto illegal_op;
2847
            if (b1) {
2848
                rm = (modrm & 7) | REX_B(s);
2849
                gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
2850
            } else {
2851
                rm = (modrm & 7);
2852
                gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
2853
            }
2854
            reg = ((modrm >> 3) & 7) | rex_r;
2855
            gen_op_mov_reg_T0[OT_LONG][reg]();
2856
            break;
2857
        default:
2858
            goto illegal_op;
2859
        }
2860
    } else {
2861
        /* generic MMX or SSE operation */
2862
        if (b == 0xf7) {
2863
            /* maskmov : we must prepare A0 */
2864
            if (mod != 3) 
2865
                goto illegal_op;
2866
#ifdef TARGET_X86_64
2867
            if (CODE64(s)) {
2868
                gen_op_movq_A0_reg[R_EDI]();
2869
            } else 
2870
#endif
2871
            {
2872
                gen_op_movl_A0_reg[R_EDI]();
2873
                if (s->aflag == 0)
2874
                    gen_op_andl_A0_ffff();
2875
            }
2876
            gen_add_A0_ds_seg(s);
2877
        }
2878
        if (is_xmm) {
2879
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2880
            if (mod != 3) {
2881
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2882
                op2_offset = offsetof(CPUX86State,xmm_t0);
2883
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f) ||
2884
                                b == 0xc2)) {
2885
                    /* specific case for SSE single instructions */
2886
                    if (b1 == 2) {
2887
                        /* 32 bit access */
2888
                        gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2889
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2890
                    } else {
2891
                        /* 64 bit access */
2892
                        gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
2893
                    }
2894
                } else {
2895
                    gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
2896
                }
2897
            } else {
2898
                rm = (modrm & 7) | REX_B(s);
2899
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2900
            }
2901
        } else {
2902
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
2903
            if (mod != 3) {
2904
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2905
                op2_offset = offsetof(CPUX86State,mmx_t0);
2906
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
2907
            } else {
2908
                rm = (modrm & 7);
2909
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2910
            }
2911
        }
2912
        switch(b) {
2913
        case 0x70: /* pshufx insn */
2914
        case 0xc6: /* pshufx insn */
2915
            val = ldub_code(s->pc++);
2916
            sse_op3 = (GenOpFunc3 *)sse_op2;
2917
            sse_op3(op1_offset, op2_offset, val);
2918
            break;
2919
        case 0xc2:
2920
            /* compare insns */
2921
            val = ldub_code(s->pc++);
2922
            if (val >= 8)
2923
                goto illegal_op;
2924
            sse_op2 = sse_op_table4[val][b1];
2925
            sse_op2(op1_offset, op2_offset);
2926
            break;
2927
        default:
2928
            sse_op2(op1_offset, op2_offset);
2929
            break;
2930
        }
2931
        if (b == 0x2e || b == 0x2f) {
2932
            s->cc_op = CC_OP_EFLAGS;
2933
        }
2934
    }
2935
}
2936

    
2937

    
2938
/* convert one instruction. s->is_jmp is set if the translation must
2939
   be stopped. Return the next pc value */
2940
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
2941
{
2942
    int b, prefixes, aflag, dflag;
2943
    int shift, ot;
2944
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
2945
    target_ulong next_eip, tval;
2946
    int rex_w, rex_r;
2947

    
2948
    s->pc = pc_start;
2949
    prefixes = 0;
2950
    aflag = s->code32;
2951
    dflag = s->code32;
2952
    s->override = -1;
2953
    rex_w = -1;
2954
    rex_r = 0;
2955
#ifdef TARGET_X86_64
2956
    s->rex_x = 0;
2957
    s->rex_b = 0;
2958
    x86_64_hregs = 0; 
2959
#endif
2960
    s->rip_offset = 0; /* for relative ip address */
2961
 next_byte:
2962
    b = ldub_code(s->pc);
2963
    s->pc++;
2964
    /* check prefixes */
2965
#ifdef TARGET_X86_64
2966
    if (CODE64(s)) {
2967
        switch (b) {
2968
        case 0xf3:
2969
            prefixes |= PREFIX_REPZ;
2970
            goto next_byte;
2971
        case 0xf2:
2972
            prefixes |= PREFIX_REPNZ;
2973
            goto next_byte;
2974
        case 0xf0:
2975
            prefixes |= PREFIX_LOCK;
2976
            goto next_byte;
2977
        case 0x2e:
2978
            s->override = R_CS;
2979
            goto next_byte;
2980
        case 0x36:
2981
            s->override = R_SS;
2982
            goto next_byte;
2983
        case 0x3e:
2984
            s->override = R_DS;
2985
            goto next_byte;
2986
        case 0x26:
2987
            s->override = R_ES;
2988
            goto next_byte;
2989
        case 0x64:
2990
            s->override = R_FS;
2991
            goto next_byte;
2992
        case 0x65:
2993
            s->override = R_GS;
2994
            goto next_byte;
2995
        case 0x66:
2996
            prefixes |= PREFIX_DATA;
2997
            goto next_byte;
2998
        case 0x67:
2999
            prefixes |= PREFIX_ADR;
3000
            goto next_byte;
3001
        case 0x40 ... 0x4f:
3002
            /* REX prefix */
3003
            rex_w = (b >> 3) & 1;
3004
            rex_r = (b & 0x4) << 1;
3005
            s->rex_x = (b & 0x2) << 2;
3006
            REX_B(s) = (b & 0x1) << 3;
3007
            x86_64_hregs = 1; /* select uniform byte register addressing */
3008
            goto next_byte;
3009
        }
3010
        if (rex_w == 1) {
3011
            /* 0x66 is ignored if rex.w is set */
3012
            dflag = 2;
3013
        } else {
3014
            if (prefixes & PREFIX_DATA)
3015
                dflag ^= 1;
3016
        }
3017
        if (!(prefixes & PREFIX_ADR))
3018
            aflag = 2;
3019
    } else 
3020
#endif
3021
    {
3022
        switch (b) {
3023
        case 0xf3:
3024
            prefixes |= PREFIX_REPZ;
3025
            goto next_byte;
3026
        case 0xf2:
3027
            prefixes |= PREFIX_REPNZ;
3028
            goto next_byte;
3029
        case 0xf0:
3030
            prefixes |= PREFIX_LOCK;
3031
            goto next_byte;
3032
        case 0x2e:
3033
            s->override = R_CS;
3034
            goto next_byte;
3035
        case 0x36:
3036
            s->override = R_SS;
3037
            goto next_byte;
3038
        case 0x3e:
3039
            s->override = R_DS;
3040
            goto next_byte;
3041
        case 0x26:
3042
            s->override = R_ES;
3043
            goto next_byte;
3044
        case 0x64:
3045
            s->override = R_FS;
3046
            goto next_byte;
3047
        case 0x65:
3048
            s->override = R_GS;
3049
            goto next_byte;
3050
        case 0x66:
3051
            prefixes |= PREFIX_DATA;
3052
            goto next_byte;
3053
        case 0x67:
3054
            prefixes |= PREFIX_ADR;
3055
            goto next_byte;
3056
        }
3057
        if (prefixes & PREFIX_DATA)
3058
            dflag ^= 1;
3059
        if (prefixes & PREFIX_ADR)
3060
            aflag ^= 1;
3061
    }
3062

    
3063
    s->prefix = prefixes;
3064
    s->aflag = aflag;
3065
    s->dflag = dflag;
3066

    
3067
    /* lock generation */
3068
    if (prefixes & PREFIX_LOCK)
3069
        gen_op_lock();
3070

    
3071
    /* now check op code */
3072
 reswitch:
3073
    switch(b) {
3074
    case 0x0f:
3075
        /**************************/
3076
        /* extended op code */
3077
        b = ldub_code(s->pc++) | 0x100;
3078
        goto reswitch;
3079
        
3080
        /**************************/
3081
        /* arith & logic */
3082
    case 0x00 ... 0x05:
3083
    case 0x08 ... 0x0d:
3084
    case 0x10 ... 0x15:
3085
    case 0x18 ... 0x1d:
3086
    case 0x20 ... 0x25:
3087
    case 0x28 ... 0x2d:
3088
    case 0x30 ... 0x35:
3089
    case 0x38 ... 0x3d:
3090
        {
3091
            int op, f, val;
3092
            op = (b >> 3) & 7;
3093
            f = (b >> 1) & 3;
3094

    
3095
            if ((b & 1) == 0)
3096
                ot = OT_BYTE;
3097
            else
3098
                ot = dflag + OT_WORD;
3099
            
3100
            switch(f) {
3101
            case 0: /* OP Ev, Gv */
3102
                modrm = ldub_code(s->pc++);
3103
                reg = ((modrm >> 3) & 7) | rex_r;
3104
                mod = (modrm >> 6) & 3;
3105
                rm = (modrm & 7) | REX_B(s);
3106
                if (mod != 3) {
3107
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3108
                    opreg = OR_TMP0;
3109
                } else if (op == OP_XORL && rm == reg) {
3110
                xor_zero:
3111
                    /* xor reg, reg optimisation */
3112
                    gen_op_movl_T0_0();
3113
                    s->cc_op = CC_OP_LOGICB + ot;
3114
                    gen_op_mov_reg_T0[ot][reg]();
3115
                    gen_op_update1_cc();
3116
                    break;
3117
                } else {
3118
                    opreg = rm;
3119
                }
3120
                gen_op_mov_TN_reg[ot][1][reg]();
3121
                gen_op(s, op, ot, opreg);
3122
                break;
3123
            case 1: /* OP Gv, Ev */
3124
                modrm = ldub_code(s->pc++);
3125
                mod = (modrm >> 6) & 3;
3126
                reg = ((modrm >> 3) & 7) | rex_r;
3127
                rm = (modrm & 7) | REX_B(s);
3128
                if (mod != 3) {
3129
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3130
                    gen_op_ld_T1_A0[ot + s->mem_index]();
3131
                } else if (op == OP_XORL && rm == reg) {
3132
                    goto xor_zero;
3133
                } else {
3134
                    gen_op_mov_TN_reg[ot][1][rm]();
3135
                }
3136
                gen_op(s, op, ot, reg);
3137
                break;
3138
            case 2: /* OP A, Iv */
3139
                val = insn_get(s, ot);
3140
                gen_op_movl_T1_im(val);
3141
                gen_op(s, op, ot, OR_EAX);
3142
                break;
3143
            }
3144
        }
3145
        break;
3146

    
3147
    case 0x80: /* GRP1 */
3148
    case 0x81:
3149
    case 0x82:
3150
    case 0x83:
3151
        {
3152
            int val;
3153

    
3154
            if ((b & 1) == 0)
3155
                ot = OT_BYTE;
3156
            else
3157
                ot = dflag + OT_WORD;
3158
            
3159
            modrm = ldub_code(s->pc++);
3160
            mod = (modrm >> 6) & 3;
3161
            rm = (modrm & 7) | REX_B(s);
3162
            op = (modrm >> 3) & 7;
3163
            
3164
            if (mod != 3) {
3165
                if (b == 0x83)
3166
                    s->rip_offset = 1;
3167
                else
3168
                    s->rip_offset = insn_const_size(ot);
3169
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3170
                opreg = OR_TMP0;
3171
            } else {
3172
                opreg = rm;
3173
            }
3174

    
3175
            switch(b) {
3176
            default:
3177
            case 0x80:
3178
            case 0x81:
3179
            case 0x82:
3180
                val = insn_get(s, ot);
3181
                break;
3182
            case 0x83:
3183
                val = (int8_t)insn_get(s, OT_BYTE);
3184
                break;
3185
            }
3186
            gen_op_movl_T1_im(val);
3187
            gen_op(s, op, ot, opreg);
3188
        }
3189
        break;
3190

    
3191
        /**************************/
3192
        /* inc, dec, and other misc arith */
3193
    case 0x40 ... 0x47: /* inc Gv */
3194
        ot = dflag ? OT_LONG : OT_WORD;
3195
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
3196
        break;
3197
    case 0x48 ... 0x4f: /* dec Gv */
3198
        ot = dflag ? OT_LONG : OT_WORD;
3199
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
3200
        break;
3201
    case 0xf6: /* GRP3 */
3202
    case 0xf7:
3203
        if ((b & 1) == 0)
3204
            ot = OT_BYTE;
3205
        else
3206
            ot = dflag + OT_WORD;
3207

    
3208
        modrm = ldub_code(s->pc++);
3209
        mod = (modrm >> 6) & 3;
3210
        rm = (modrm & 7) | REX_B(s);
3211
        op = (modrm >> 3) & 7;
3212
        if (mod != 3) {
3213
            if (op == 0)
3214
                s->rip_offset = insn_const_size(ot);
3215
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3216
            gen_op_ld_T0_A0[ot + s->mem_index]();
3217
        } else {
3218
            gen_op_mov_TN_reg[ot][0][rm]();
3219
        }
3220

    
3221
        switch(op) {
3222
        case 0: /* test */
3223
            val = insn_get(s, ot);
3224
            gen_op_movl_T1_im(val);
3225
            gen_op_testl_T0_T1_cc();
3226
            s->cc_op = CC_OP_LOGICB + ot;
3227
            break;
3228
        case 2: /* not */
3229
            gen_op_notl_T0();
3230
            if (mod != 3) {
3231
                gen_op_st_T0_A0[ot + s->mem_index]();
3232
            } else {
3233
                gen_op_mov_reg_T0[ot][rm]();
3234
            }
3235
            break;
3236
        case 3: /* neg */
3237
            gen_op_negl_T0();
3238
            if (mod != 3) {
3239
                gen_op_st_T0_A0[ot + s->mem_index]();
3240
            } else {
3241
                gen_op_mov_reg_T0[ot][rm]();
3242
            }
3243
            gen_op_update_neg_cc();
3244
            s->cc_op = CC_OP_SUBB + ot;
3245
            break;
3246
        case 4: /* mul */
3247
            switch(ot) {
3248
            case OT_BYTE:
3249
                gen_op_mulb_AL_T0();
3250
                s->cc_op = CC_OP_MULB;
3251
                break;
3252
            case OT_WORD:
3253
                gen_op_mulw_AX_T0();
3254
                s->cc_op = CC_OP_MULW;
3255
                break;
3256
            default:
3257
            case OT_LONG:
3258
                gen_op_mull_EAX_T0();
3259
                s->cc_op = CC_OP_MULL;
3260
                break;
3261
#ifdef TARGET_X86_64
3262
            case OT_QUAD:
3263
                gen_op_mulq_EAX_T0();
3264
                s->cc_op = CC_OP_MULQ;
3265
                break;
3266
#endif
3267
            }
3268
            break;
3269
        case 5: /* imul */
3270
            switch(ot) {
3271
            case OT_BYTE:
3272
                gen_op_imulb_AL_T0();
3273
                s->cc_op = CC_OP_MULB;
3274
                break;
3275
            case OT_WORD:
3276
                gen_op_imulw_AX_T0();
3277
                s->cc_op = CC_OP_MULW;
3278
                break;
3279
            default:
3280
            case OT_LONG:
3281
                gen_op_imull_EAX_T0();
3282
                s->cc_op = CC_OP_MULL;
3283
                break;
3284
#ifdef TARGET_X86_64
3285
            case OT_QUAD:
3286
                gen_op_imulq_EAX_T0();
3287
                s->cc_op = CC_OP_MULQ;
3288
                break;
3289
#endif
3290
            }
3291
            break;
3292
        case 6: /* div */
3293
            switch(ot) {
3294
            case OT_BYTE:
3295
                gen_jmp_im(pc_start - s->cs_base);
3296
                gen_op_divb_AL_T0();
3297
                break;
3298
            case OT_WORD:
3299
                gen_jmp_im(pc_start - s->cs_base);
3300
                gen_op_divw_AX_T0();
3301
                break;
3302
            default:
3303
            case OT_LONG:
3304
                gen_jmp_im(pc_start - s->cs_base);
3305
                gen_op_divl_EAX_T0();
3306
                break;
3307
#ifdef TARGET_X86_64
3308
            case OT_QUAD:
3309
                gen_jmp_im(pc_start - s->cs_base);
3310
                gen_op_divq_EAX_T0();
3311
                break;
3312
#endif
3313
            }
3314
            break;
3315
        case 7: /* idiv */
3316
            switch(ot) {
3317
            case OT_BYTE:
3318
                gen_jmp_im(pc_start - s->cs_base);
3319
                gen_op_idivb_AL_T0();
3320
                break;
3321
            case OT_WORD:
3322
                gen_jmp_im(pc_start - s->cs_base);
3323
                gen_op_idivw_AX_T0();
3324
                break;
3325
            default:
3326
            case OT_LONG:
3327
                gen_jmp_im(pc_start - s->cs_base);
3328
                gen_op_idivl_EAX_T0();
3329
                break;
3330
#ifdef TARGET_X86_64
3331
            case OT_QUAD:
3332
                gen_jmp_im(pc_start - s->cs_base);
3333
                gen_op_idivq_EAX_T0();
3334
                break;
3335
#endif
3336
            }
3337
            break;
3338
        default:
3339
            goto illegal_op;
3340
        }
3341
        break;
3342

    
3343
    case 0xfe: /* GRP4 */
3344
    case 0xff: /* GRP5 */
3345
        if ((b & 1) == 0)
3346
            ot = OT_BYTE;
3347
        else
3348
            ot = dflag + OT_WORD;
3349

    
3350
        modrm = ldub_code(s->pc++);
3351
        mod = (modrm >> 6) & 3;
3352
        rm = (modrm & 7) | REX_B(s);
3353
        op = (modrm >> 3) & 7;
3354
        if (op >= 2 && b == 0xfe) {
3355
            goto illegal_op;
3356
        }
3357
        if (CODE64(s)) {
3358
            if (op >= 2 && op <= 5) {
3359
                /* operand size for jumps is 64 bit */
3360
                ot = OT_QUAD;
3361
            } else if (op == 6) {
3362
                /* default push size is 64 bit */
3363
                ot = dflag ? OT_QUAD : OT_WORD;
3364
            }
3365
        }
3366
        if (mod != 3) {
3367
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3368
            if (op >= 2 && op != 3 && op != 5)
3369
                gen_op_ld_T0_A0[ot + s->mem_index]();
3370
        } else {
3371
            gen_op_mov_TN_reg[ot][0][rm]();
3372
        }
3373

    
3374
        switch(op) {
3375
        case 0: /* inc Ev */
3376
            if (mod != 3)
3377
                opreg = OR_TMP0;
3378
            else
3379
                opreg = rm;
3380
            gen_inc(s, ot, opreg, 1);
3381
            break;
3382
        case 1: /* dec Ev */
3383
            if (mod != 3)
3384
                opreg = OR_TMP0;
3385
            else
3386
                opreg = rm;
3387
            gen_inc(s, ot, opreg, -1);
3388
            break;
3389
        case 2: /* call Ev */
3390
            /* XXX: optimize if memory (no 'and' is necessary) */
3391
            if (s->dflag == 0)
3392
                gen_op_andl_T0_ffff();
3393
            next_eip = s->pc - s->cs_base;
3394
            gen_op_movl_T1_im(next_eip);
3395
            gen_push_T1(s);
3396
            gen_op_jmp_T0();
3397
            gen_eob(s);
3398
            break;
3399
        case 3: /* lcall Ev */
3400
            gen_op_ld_T1_A0[ot + s->mem_index]();
3401
            gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
3402
            gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3403
        do_lcall:
3404
            if (s->pe && !s->vm86) {
3405
                if (s->cc_op != CC_OP_DYNAMIC)
3406
                    gen_op_set_cc_op(s->cc_op);
3407
                gen_jmp_im(pc_start - s->cs_base);
3408
                gen_op_lcall_protected_T0_T1(dflag, s->pc - s->cs_base);
3409
            } else {
3410
                gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3411
            }
3412
            gen_eob(s);
3413
            break;
3414
        case 4: /* jmp Ev */
3415
            if (s->dflag == 0)
3416
                gen_op_andl_T0_ffff();
3417
            gen_op_jmp_T0();
3418
            gen_eob(s);
3419
            break;
3420
        case 5: /* ljmp Ev */
3421
            gen_op_ld_T1_A0[ot + s->mem_index]();
3422
            gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
3423
            gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3424
        do_ljmp:
3425
            if (s->pe && !s->vm86) {
3426
                if (s->cc_op != CC_OP_DYNAMIC)
3427
                    gen_op_set_cc_op(s->cc_op);
3428
                gen_jmp_im(pc_start - s->cs_base);
3429
                gen_op_ljmp_protected_T0_T1(s->pc - s->cs_base);
3430
            } else {
3431
                gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3432
                gen_op_movl_T0_T1();
3433
                gen_op_jmp_T0();
3434
            }
3435
            gen_eob(s);
3436
            break;
3437
        case 6: /* push Ev */
3438
            gen_push_T0(s);
3439
            break;
3440
        default:
3441
            goto illegal_op;
3442
        }
3443
        break;
3444

    
3445
    case 0x84: /* test Ev, Gv */
3446
    case 0x85: 
3447
        if ((b & 1) == 0)
3448
            ot = OT_BYTE;
3449
        else
3450
            ot = dflag + OT_WORD;
3451

    
3452
        modrm = ldub_code(s->pc++);
3453
        mod = (modrm >> 6) & 3;
3454
        rm = (modrm & 7) | REX_B(s);
3455
        reg = ((modrm >> 3) & 7) | rex_r;
3456
        
3457
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3458
        gen_op_mov_TN_reg[ot][1][reg]();
3459
        gen_op_testl_T0_T1_cc();
3460
        s->cc_op = CC_OP_LOGICB + ot;
3461
        break;
3462
        
3463
    case 0xa8: /* test eAX, Iv */
3464
    case 0xa9:
3465
        if ((b & 1) == 0)
3466
            ot = OT_BYTE;
3467
        else
3468
            ot = dflag + OT_WORD;
3469
        val = insn_get(s, ot);
3470

    
3471
        gen_op_mov_TN_reg[ot][0][OR_EAX]();
3472
        gen_op_movl_T1_im(val);
3473
        gen_op_testl_T0_T1_cc();
3474
        s->cc_op = CC_OP_LOGICB + ot;
3475
        break;
3476
        
3477
    case 0x98: /* CWDE/CBW */
3478
#ifdef TARGET_X86_64
3479
        if (dflag == 2) {
3480
            gen_op_movslq_RAX_EAX();
3481
        } else
3482
#endif
3483
        if (dflag == 1)
3484
            gen_op_movswl_EAX_AX();
3485
        else
3486
            gen_op_movsbw_AX_AL();
3487
        break;
3488
    case 0x99: /* CDQ/CWD */
3489
#ifdef TARGET_X86_64
3490
        if (dflag == 2) {
3491
            gen_op_movsqo_RDX_RAX();
3492
        } else
3493
#endif
3494
        if (dflag == 1)
3495
            gen_op_movslq_EDX_EAX();
3496
        else
3497
            gen_op_movswl_DX_AX();
3498
        break;
3499
    case 0x1af: /* imul Gv, Ev */
3500
    case 0x69: /* imul Gv, Ev, I */
3501
    case 0x6b:
3502
        ot = dflag + OT_WORD;
3503
        modrm = ldub_code(s->pc++);
3504
        reg = ((modrm >> 3) & 7) | rex_r;
3505
        if (b == 0x69)
3506
            s->rip_offset = insn_const_size(ot);
3507
        else if (b == 0x6b)
3508
            s->rip_offset = 1;
3509
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3510
        if (b == 0x69) {
3511
            val = insn_get(s, ot);
3512
            gen_op_movl_T1_im(val);
3513
        } else if (b == 0x6b) {
3514
            val = (int8_t)insn_get(s, OT_BYTE);
3515
            gen_op_movl_T1_im(val);
3516
        } else {
3517
            gen_op_mov_TN_reg[ot][1][reg]();
3518
        }
3519

    
3520
#ifdef TARGET_X86_64
3521
        if (ot == OT_QUAD) {
3522
            gen_op_imulq_T0_T1();
3523
        } else
3524
#endif
3525
        if (ot == OT_LONG) {
3526
            gen_op_imull_T0_T1();
3527
        } else {
3528
            gen_op_imulw_T0_T1();
3529
        }
3530
        gen_op_mov_reg_T0[ot][reg]();
3531
        s->cc_op = CC_OP_MULB + ot;
3532
        break;
3533
    case 0x1c0:
3534
    case 0x1c1: /* xadd Ev, Gv */
3535
        if ((b & 1) == 0)
3536
            ot = OT_BYTE;
3537
        else
3538
            ot = dflag + OT_WORD;
3539
        modrm = ldub_code(s->pc++);
3540
        reg = ((modrm >> 3) & 7) | rex_r;
3541
        mod = (modrm >> 6) & 3;
3542
        if (mod == 3) {
3543
            rm = (modrm & 7) | REX_B(s);
3544
            gen_op_mov_TN_reg[ot][0][reg]();
3545
            gen_op_mov_TN_reg[ot][1][rm]();
3546
            gen_op_addl_T0_T1();
3547
            gen_op_mov_reg_T1[ot][reg]();
3548
            gen_op_mov_reg_T0[ot][rm]();
3549
        } else {
3550
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3551
            gen_op_mov_TN_reg[ot][0][reg]();
3552
            gen_op_ld_T1_A0[ot + s->mem_index]();
3553
            gen_op_addl_T0_T1();
3554
            gen_op_st_T0_A0[ot + s->mem_index]();
3555
            gen_op_mov_reg_T1[ot][reg]();
3556
        }
3557
        gen_op_update2_cc();
3558
        s->cc_op = CC_OP_ADDB + ot;
3559
        break;
3560
    case 0x1b0:
3561
    case 0x1b1: /* cmpxchg Ev, Gv */
3562
        if ((b & 1) == 0)
3563
            ot = OT_BYTE;
3564
        else
3565
            ot = dflag + OT_WORD;
3566
        modrm = ldub_code(s->pc++);
3567
        reg = ((modrm >> 3) & 7) | rex_r;
3568
        mod = (modrm >> 6) & 3;
3569
        gen_op_mov_TN_reg[ot][1][reg]();
3570
        if (mod == 3) {
3571
            rm = (modrm & 7) | REX_B(s);
3572
            gen_op_mov_TN_reg[ot][0][rm]();
3573
            gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3574
            gen_op_mov_reg_T0[ot][rm]();
3575
        } else {
3576
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3577
            gen_op_ld_T0_A0[ot + s->mem_index]();
3578
            gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3579
        }
3580
        s->cc_op = CC_OP_SUBB + ot;
3581
        break;
3582
    case 0x1c7: /* cmpxchg8b */
3583
        modrm = ldub_code(s->pc++);
3584
        mod = (modrm >> 6) & 3;
3585
        if (mod == 3)
3586
            goto illegal_op;
3587
        if (s->cc_op != CC_OP_DYNAMIC)
3588
            gen_op_set_cc_op(s->cc_op);
3589
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3590
        gen_op_cmpxchg8b();
3591
        s->cc_op = CC_OP_EFLAGS;
3592
        break;
3593
        
3594
        /**************************/
3595
        /* push/pop */
3596
    case 0x50 ... 0x57: /* push */
3597
        gen_op_mov_TN_reg[OT_LONG][0][(b & 7) | REX_B(s)]();
3598
        gen_push_T0(s);
3599
        break;
3600
    case 0x58 ... 0x5f: /* pop */
3601
        if (CODE64(s)) {
3602
            ot = dflag ? OT_QUAD : OT_WORD;
3603
        } else {
3604
            ot = dflag + OT_WORD;
3605
        }
3606
        gen_pop_T0(s);
3607
        /* NOTE: order is important for pop %sp */
3608
        gen_pop_update(s);
3609
        gen_op_mov_reg_T0[ot][(b & 7) | REX_B(s)]();
3610
        break;
3611
    case 0x60: /* pusha */
3612
        if (CODE64(s))
3613
            goto illegal_op;
3614
        gen_pusha(s);
3615
        break;
3616
    case 0x61: /* popa */
3617
        if (CODE64(s))
3618
            goto illegal_op;
3619
        gen_popa(s);
3620
        break;
3621
    case 0x68: /* push Iv */
3622
    case 0x6a:
3623
        if (CODE64(s)) {
3624
            ot = dflag ? OT_QUAD : OT_WORD;
3625
        } else {
3626
            ot = dflag + OT_WORD;
3627
        }
3628
        if (b == 0x68)
3629
            val = insn_get(s, ot);
3630
        else
3631
            val = (int8_t)insn_get(s, OT_BYTE);
3632
        gen_op_movl_T0_im(val);
3633
        gen_push_T0(s);
3634
        break;
3635
    case 0x8f: /* pop Ev */
3636
        if (CODE64(s)) {
3637
            ot = dflag ? OT_QUAD : OT_WORD;
3638
        } else {
3639
            ot = dflag + OT_WORD;
3640
        }
3641
        modrm = ldub_code(s->pc++);
3642
        mod = (modrm >> 6) & 3;
3643
        gen_pop_T0(s);
3644
        if (mod == 3) {
3645
            /* NOTE: order is important for pop %sp */
3646
            gen_pop_update(s);
3647
            rm = (modrm & 7) | REX_B(s);
3648
            gen_op_mov_reg_T0[ot][rm]();
3649
        } else {
3650
            /* NOTE: order is important too for MMU exceptions */
3651
            s->popl_esp_hack = 1 << ot;
3652
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3653
            s->popl_esp_hack = 0;
3654
            gen_pop_update(s);
3655
        }
3656
        break;
3657
    case 0xc8: /* enter */
3658
        {
3659
            /* XXX: long mode support */
3660
            int level;
3661
            val = lduw_code(s->pc);
3662
            s->pc += 2;
3663
            level = ldub_code(s->pc++);
3664
            gen_enter(s, val, level);
3665
        }
3666
        break;
3667
    case 0xc9: /* leave */
3668
        /* XXX: exception not precise (ESP is updated before potential exception) */
3669
        /* XXX: may be invalid for 16 bit in long mode */
3670
        if (CODE64(s)) {
3671
            gen_op_mov_TN_reg[OT_QUAD][0][R_EBP]();
3672
            gen_op_mov_reg_T0[OT_QUAD][R_ESP]();
3673
        } else if (s->ss32) {
3674
            gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
3675
            gen_op_mov_reg_T0[OT_LONG][R_ESP]();
3676
        } else {
3677
            gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
3678
            gen_op_mov_reg_T0[OT_WORD][R_ESP]();
3679
        }
3680
        gen_pop_T0(s);
3681
        if (CODE64(s)) {
3682
            ot = dflag ? OT_QUAD : OT_WORD;
3683
        } else {
3684
            ot = dflag + OT_WORD;
3685
        }
3686
        gen_op_mov_reg_T0[ot][R_EBP]();
3687
        gen_pop_update(s);
3688
        break;
3689
    case 0x06: /* push es */
3690
    case 0x0e: /* push cs */
3691
    case 0x16: /* push ss */
3692
    case 0x1e: /* push ds */
3693
        if (CODE64(s))
3694
            goto illegal_op;
3695
        gen_op_movl_T0_seg(b >> 3);
3696
        gen_push_T0(s);
3697
        break;
3698
    case 0x1a0: /* push fs */
3699
    case 0x1a8: /* push gs */
3700
        gen_op_movl_T0_seg((b >> 3) & 7);
3701
        gen_push_T0(s);
3702
        break;
3703
    case 0x07: /* pop es */
3704
    case 0x17: /* pop ss */
3705
    case 0x1f: /* pop ds */
3706
        if (CODE64(s))
3707
            goto illegal_op;
3708
        reg = b >> 3;
3709
        gen_pop_T0(s);
3710
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
3711
        gen_pop_update(s);
3712
        if (reg == R_SS) {
3713
            /* if reg == SS, inhibit interrupts/trace. */
3714
            /* If several instructions disable interrupts, only the
3715
               _first_ does it */
3716
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
3717
                gen_op_set_inhibit_irq();
3718
            s->tf = 0;
3719
        }
3720
        if (s->is_jmp) {
3721
            gen_jmp_im(s->pc - s->cs_base);
3722
            gen_eob(s);
3723
        }
3724
        break;
3725
    case 0x1a1: /* pop fs */
3726
    case 0x1a9: /* pop gs */
3727
        gen_pop_T0(s);
3728
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
3729
        gen_pop_update(s);
3730
        if (s->is_jmp) {
3731
            gen_jmp_im(s->pc - s->cs_base);
3732
            gen_eob(s);
3733
        }
3734
        break;
3735

    
3736
        /**************************/
3737
        /* mov */
3738
    case 0x88:
3739
    case 0x89: /* mov Gv, Ev */
3740
        if ((b & 1) == 0)
3741
            ot = OT_BYTE;
3742
        else
3743
            ot = dflag + OT_WORD;
3744
        modrm = ldub_code(s->pc++);
3745
        reg = ((modrm >> 3) & 7) | rex_r;
3746
        
3747
        /* generate a generic store */
3748
        gen_ldst_modrm(s, modrm, ot, reg, 1);
3749
        break;
3750
    case 0xc6:
3751
    case 0xc7: /* mov Ev, Iv */
3752
        if ((b & 1) == 0)
3753
            ot = OT_BYTE;
3754
        else
3755
            ot = dflag + OT_WORD;
3756
        modrm = ldub_code(s->pc++);
3757
        mod = (modrm >> 6) & 3;
3758
        if (mod != 3) {
3759
            s->rip_offset = insn_const_size(ot);
3760
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3761
        }
3762
        val = insn_get(s, ot);
3763
        gen_op_movl_T0_im(val);
3764
        if (mod != 3)
3765
            gen_op_st_T0_A0[ot + s->mem_index]();
3766
        else
3767
            gen_op_mov_reg_T0[ot][(modrm & 7) | REX_B(s)]();
3768
        break;
3769
    case 0x8a:
3770
    case 0x8b: /* mov Ev, Gv */
3771
        if ((b & 1) == 0)
3772
            ot = OT_BYTE;
3773
        else
3774
            ot = OT_WORD + dflag;
3775
        modrm = ldub_code(s->pc++);
3776
        reg = ((modrm >> 3) & 7) | rex_r;
3777
        
3778
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3779
        gen_op_mov_reg_T0[ot][reg]();
3780
        break;
3781
    case 0x8e: /* mov seg, Gv */
3782
        modrm = ldub_code(s->pc++);
3783
        reg = (modrm >> 3) & 7;
3784
        if (reg >= 6 || reg == R_CS)
3785
            goto illegal_op;
3786
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3787
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
3788
        if (reg == R_SS) {
3789
            /* if reg == SS, inhibit interrupts/trace */
3790
            /* If several instructions disable interrupts, only the
3791
               _first_ does it */
3792
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
3793
                gen_op_set_inhibit_irq();
3794
            s->tf = 0;
3795
        }
3796
        if (s->is_jmp) {
3797
            gen_jmp_im(s->pc - s->cs_base);
3798
            gen_eob(s);
3799
        }
3800
        break;
3801
    case 0x8c: /* mov Gv, seg */
3802
        modrm = ldub_code(s->pc++);
3803
        reg = (modrm >> 3) & 7;
3804
        mod = (modrm >> 6) & 3;
3805
        if (reg >= 6)
3806
            goto illegal_op;
3807
        gen_op_movl_T0_seg(reg);
3808
        if (mod == 3)
3809
            ot = OT_WORD + dflag;
3810
        else
3811
            ot = OT_WORD;
3812
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3813
        break;
3814

    
3815
    case 0x1b6: /* movzbS Gv, Eb */
3816
    case 0x1b7: /* movzwS Gv, Eb */
3817
    case 0x1be: /* movsbS Gv, Eb */
3818
    case 0x1bf: /* movswS Gv, Eb */
3819
        {
3820
            int d_ot;
3821
            /* d_ot is the size of destination */
3822
            d_ot = dflag + OT_WORD;
3823
            /* ot is the size of source */
3824
            ot = (b & 1) + OT_BYTE;
3825
            modrm = ldub_code(s->pc++);
3826
            reg = ((modrm >> 3) & 7) | rex_r;
3827
            mod = (modrm >> 6) & 3;
3828
            rm = (modrm & 7) | REX_B(s);
3829
            
3830
            if (mod == 3) {
3831
                gen_op_mov_TN_reg[ot][0][rm]();
3832
                switch(ot | (b & 8)) {
3833
                case OT_BYTE:
3834
                    gen_op_movzbl_T0_T0();
3835
                    break;
3836
                case OT_BYTE | 8:
3837
                    gen_op_movsbl_T0_T0();
3838
                    break;
3839
                case OT_WORD:
3840
                    gen_op_movzwl_T0_T0();
3841
                    break;
3842
                default:
3843
                case OT_WORD | 8:
3844
                    gen_op_movswl_T0_T0();
3845
                    break;
3846
                }
3847
                gen_op_mov_reg_T0[d_ot][reg]();
3848
            } else {
3849
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3850
                if (b & 8) {
3851
                    gen_op_lds_T0_A0[ot + s->mem_index]();
3852
                } else {
3853
                    gen_op_ldu_T0_A0[ot + s->mem_index]();
3854
                }
3855
                gen_op_mov_reg_T0[d_ot][reg]();
3856
            }
3857
        }
3858
        break;
3859

    
3860
    case 0x8d: /* lea */
3861
        ot = dflag + OT_WORD;
3862
        modrm = ldub_code(s->pc++);
3863
        mod = (modrm >> 6) & 3;
3864
        if (mod == 3)
3865
            goto illegal_op;
3866
        reg = ((modrm >> 3) & 7) | rex_r;
3867
        /* we must ensure that no segment is added */
3868
        s->override = -1;
3869
        val = s->addseg;
3870
        s->addseg = 0;
3871
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3872
        s->addseg = val;
3873
        gen_op_mov_reg_A0[ot - OT_WORD][reg]();
3874
        break;
3875
        
3876
    case 0xa0: /* mov EAX, Ov */
3877
    case 0xa1:
3878
    case 0xa2: /* mov Ov, EAX */
3879
    case 0xa3:
3880
        {
3881
            target_ulong offset_addr;
3882

    
3883
            if ((b & 1) == 0)
3884
                ot = OT_BYTE;
3885
            else
3886
                ot = dflag + OT_WORD;
3887
#ifdef TARGET_X86_64
3888
            if (CODE64(s)) {
3889
                offset_addr = ldq_code(s->pc);
3890
                s->pc += 8;
3891
                if (offset_addr == (int32_t)offset_addr)
3892
                    gen_op_movq_A0_im(offset_addr);
3893
                else
3894
                    gen_op_movq_A0_im64(offset_addr >> 32, offset_addr);
3895
            } else 
3896
#endif
3897
            {
3898
                if (s->aflag) {
3899
                    offset_addr = insn_get(s, OT_LONG);
3900
                } else {
3901
                    offset_addr = insn_get(s, OT_WORD);
3902
                }
3903
                gen_op_movl_A0_im(offset_addr);
3904
            }
3905
            gen_add_A0_ds_seg(s);
3906
            if ((b & 2) == 0) {
3907
                gen_op_ld_T0_A0[ot + s->mem_index]();
3908
                gen_op_mov_reg_T0[ot][R_EAX]();
3909
            } else {
3910
                gen_op_mov_TN_reg[ot][0][R_EAX]();
3911
                gen_op_st_T0_A0[ot + s->mem_index]();
3912
            }
3913
        }
3914
        break;
3915
    case 0xd7: /* xlat */
3916
#ifdef TARGET_X86_64
3917
        if (CODE64(s)) {
3918
            gen_op_movq_A0_reg[R_EBX]();
3919
            gen_op_addq_A0_AL();
3920
        } else 
3921
#endif
3922
        {
3923
            gen_op_movl_A0_reg[R_EBX]();
3924
            gen_op_addl_A0_AL();
3925
            if (s->aflag == 0)
3926
                gen_op_andl_A0_ffff();
3927
        }
3928
        gen_add_A0_ds_seg(s);
3929
        gen_op_ldu_T0_A0[OT_BYTE + s->mem_index]();
3930
        gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
3931
        break;
3932
    case 0xb0 ... 0xb7: /* mov R, Ib */
3933
        val = insn_get(s, OT_BYTE);
3934
        gen_op_movl_T0_im(val);
3935
        gen_op_mov_reg_T0[OT_BYTE][(b & 7) | REX_B(s)]();
3936
        break;
3937
    case 0xb8 ... 0xbf: /* mov R, Iv */
3938
#ifdef TARGET_X86_64
3939
        if (dflag == 2) {
3940
            uint64_t tmp;
3941
            /* 64 bit case */
3942
            tmp = ldq_code(s->pc);
3943
            s->pc += 8;
3944
            reg = (b & 7) | REX_B(s);
3945
            gen_movtl_T0_im(tmp);
3946
            gen_op_mov_reg_T0[OT_QUAD][reg]();
3947
        } else 
3948
#endif
3949
        {
3950
            ot = dflag ? OT_LONG : OT_WORD;
3951
            val = insn_get(s, ot);
3952
            reg = (b & 7) | REX_B(s);
3953
            gen_op_movl_T0_im(val);
3954
            gen_op_mov_reg_T0[ot][reg]();
3955
        }
3956
        break;
3957

    
3958
    case 0x91 ... 0x97: /* xchg R, EAX */
3959
        ot = dflag + OT_WORD;
3960
        reg = (b & 7) | REX_B(s);
3961
        rm = R_EAX;
3962
        goto do_xchg_reg;
3963
    case 0x86:
3964
    case 0x87: /* xchg Ev, Gv */
3965
        if ((b & 1) == 0)
3966
            ot = OT_BYTE;
3967
        else
3968
            ot = dflag + OT_WORD;
3969
        modrm = ldub_code(s->pc++);
3970
        reg = ((modrm >> 3) & 7) | rex_r;
3971
        mod = (modrm >> 6) & 3;
3972
        if (mod == 3) {
3973
            rm = (modrm & 7) | REX_B(s);
3974
        do_xchg_reg:
3975
            gen_op_mov_TN_reg[ot][0][reg]();
3976
            gen_op_mov_TN_reg[ot][1][rm]();
3977
            gen_op_mov_reg_T0[ot][rm]();
3978
            gen_op_mov_reg_T1[ot][reg]();
3979
        } else {
3980
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3981
            gen_op_mov_TN_reg[ot][0][reg]();
3982
            /* for xchg, lock is implicit */
3983
            if (!(prefixes & PREFIX_LOCK))
3984
                gen_op_lock();
3985
            gen_op_ld_T1_A0[ot + s->mem_index]();
3986
            gen_op_st_T0_A0[ot + s->mem_index]();
3987
            if (!(prefixes & PREFIX_LOCK))
3988
                gen_op_unlock();
3989
            gen_op_mov_reg_T1[ot][reg]();
3990
        }
3991
        break;
3992
    case 0xc4: /* les Gv */
3993
        if (CODE64(s))
3994
            goto illegal_op;
3995
        op = R_ES;
3996
        goto do_lxx;
3997
    case 0xc5: /* lds Gv */
3998
        if (CODE64(s))
3999
            goto illegal_op;
4000
        op = R_DS;
4001
        goto do_lxx;
4002
    case 0x1b2: /* lss Gv */
4003
        op = R_SS;
4004
        goto do_lxx;
4005
    case 0x1b4: /* lfs Gv */
4006
        op = R_FS;
4007
        goto do_lxx;
4008
    case 0x1b5: /* lgs Gv */
4009
        op = R_GS;
4010
    do_lxx:
4011
        ot = dflag ? OT_LONG : OT_WORD;
4012
        modrm = ldub_code(s->pc++);
4013
        reg = ((modrm >> 3) & 7) | rex_r;
4014
        mod = (modrm >> 6) & 3;
4015
        if (mod == 3)
4016
            goto illegal_op;
4017
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4018
        gen_op_ld_T1_A0[ot + s->mem_index]();
4019
        gen_op_addl_A0_im(1 << (ot - OT_WORD + 1));
4020
        /* load the segment first to handle exceptions properly */
4021
        gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
4022
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4023
        /* then put the data */
4024
        gen_op_mov_reg_T1[ot][reg]();
4025
        if (s->is_jmp) {
4026
            gen_jmp_im(s->pc - s->cs_base);
4027
            gen_eob(s);
4028
        }
4029
        break;
4030
        
4031
        /************************/
4032
        /* shifts */
4033
    case 0xc0:
4034
    case 0xc1:
4035
        /* shift Ev,Ib */
4036
        shift = 2;
4037
    grp2:
4038
        {
4039
            if ((b & 1) == 0)
4040
                ot = OT_BYTE;
4041
            else
4042
                ot = dflag + OT_WORD;
4043
            
4044
            modrm = ldub_code(s->pc++);
4045
            mod = (modrm >> 6) & 3;
4046
            op = (modrm >> 3) & 7;
4047
            
4048
            if (mod != 3) {
4049
                if (shift == 2) {
4050
                    s->rip_offset = 1;
4051
                }
4052
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4053
                opreg = OR_TMP0;
4054
            } else {
4055
                opreg = (modrm & 7) | REX_B(s);
4056
            }
4057

    
4058
            /* simpler op */
4059
            if (shift == 0) {
4060
                gen_shift(s, op, ot, opreg, OR_ECX);
4061
            } else {
4062
                if (shift == 2) {
4063
                    shift = ldub_code(s->pc++);
4064
                }
4065
                gen_shifti(s, op, ot, opreg, shift);
4066
            }
4067
        }
4068
        break;
4069
    case 0xd0:
4070
    case 0xd1:
4071
        /* shift Ev,1 */
4072
        shift = 1;
4073
        goto grp2;
4074
    case 0xd2:
4075
    case 0xd3:
4076
        /* shift Ev,cl */
4077
        shift = 0;
4078
        goto grp2;
4079

    
4080
    case 0x1a4: /* shld imm */
4081
        op = 0;
4082
        shift = 1;
4083
        goto do_shiftd;
4084
    case 0x1a5: /* shld cl */
4085
        op = 0;
4086
        shift = 0;
4087
        goto do_shiftd;
4088
    case 0x1ac: /* shrd imm */
4089
        op = 1;
4090
        shift = 1;
4091
        goto do_shiftd;
4092
    case 0x1ad: /* shrd cl */
4093
        op = 1;
4094
        shift = 0;
4095
    do_shiftd:
4096
        ot = dflag + OT_WORD;
4097
        modrm = ldub_code(s->pc++);
4098
        mod = (modrm >> 6) & 3;
4099
        rm = (modrm & 7) | REX_B(s);
4100
        reg = ((modrm >> 3) & 7) | rex_r;
4101
        
4102
        if (mod != 3) {
4103
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4104
            gen_op_ld_T0_A0[ot + s->mem_index]();
4105
        } else {
4106
            gen_op_mov_TN_reg[ot][0][rm]();
4107
        }
4108
        gen_op_mov_TN_reg[ot][1][reg]();
4109
        
4110
        if (shift) {
4111
            val = ldub_code(s->pc++);
4112
            if (ot == OT_QUAD)
4113
                val &= 0x3f;
4114
            else
4115
                val &= 0x1f;
4116
            if (val) {
4117
                if (mod == 3)
4118
                    gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4119
                else
4120
                    gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4121
                if (op == 0 && ot != OT_WORD)
4122
                    s->cc_op = CC_OP_SHLB + ot;
4123
                else
4124
                    s->cc_op = CC_OP_SARB + ot;
4125
            }
4126
        } else {
4127
            if (s->cc_op != CC_OP_DYNAMIC)
4128
                gen_op_set_cc_op(s->cc_op);
4129
            if (mod == 3)
4130
                gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4131
            else
4132
                gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4133
            s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4134
        }
4135
        if (mod == 3) {
4136
            gen_op_mov_reg_T0[ot][rm]();
4137
        }
4138
        break;
4139

    
4140
        /************************/
4141
        /* floats */
4142
    case 0xd8 ... 0xdf: 
4143
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4144
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4145
            /* XXX: what to do if illegal op ? */
4146
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4147
            break;
4148
        }
4149
        modrm = ldub_code(s->pc++);
4150
        mod = (modrm >> 6) & 3;
4151
        rm = modrm & 7;
4152
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4153
        if (mod != 3) {
4154
            /* memory op */
4155
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4156
            switch(op) {
4157
            case 0x00 ... 0x07: /* fxxxs */
4158
            case 0x10 ... 0x17: /* fixxxl */
4159
            case 0x20 ... 0x27: /* fxxxl */
4160
            case 0x30 ... 0x37: /* fixxx */
4161
                {
4162
                    int op1;
4163
                    op1 = op & 7;
4164

    
4165
                    switch(op >> 4) {
4166
                    case 0:
4167
                        gen_op_flds_FT0_A0();
4168
                        break;
4169
                    case 1:
4170
                        gen_op_fildl_FT0_A0();
4171
                        break;
4172
                    case 2:
4173
                        gen_op_fldl_FT0_A0();
4174
                        break;
4175
                    case 3:
4176
                    default:
4177
                        gen_op_fild_FT0_A0();
4178
                        break;
4179
                    }
4180
                    
4181
                    gen_op_fp_arith_ST0_FT0[op1]();
4182
                    if (op1 == 3) {
4183
                        /* fcomp needs pop */
4184
                        gen_op_fpop();
4185
                    }
4186
                }
4187
                break;
4188
            case 0x08: /* flds */
4189
            case 0x0a: /* fsts */
4190
            case 0x0b: /* fstps */
4191
            case 0x18: /* fildl */
4192
            case 0x1a: /* fistl */
4193
            case 0x1b: /* fistpl */
4194
            case 0x28: /* fldl */
4195
            case 0x2a: /* fstl */
4196
            case 0x2b: /* fstpl */
4197
            case 0x38: /* filds */
4198
            case 0x3a: /* fists */
4199
            case 0x3b: /* fistps */
4200
                
4201
                switch(op & 7) {
4202
                case 0:
4203
                    switch(op >> 4) {
4204
                    case 0:
4205
                        gen_op_flds_ST0_A0();
4206
                        break;
4207
                    case 1:
4208
                        gen_op_fildl_ST0_A0();
4209
                        break;
4210
                    case 2:
4211
                        gen_op_fldl_ST0_A0();
4212
                        break;
4213
                    case 3:
4214
                    default:
4215
                        gen_op_fild_ST0_A0();
4216
                        break;
4217
                    }
4218
                    break;
4219
                default:
4220
                    switch(op >> 4) {
4221
                    case 0:
4222
                        gen_op_fsts_ST0_A0();
4223
                        break;
4224
                    case 1:
4225
                        gen_op_fistl_ST0_A0();
4226
                        break;
4227
                    case 2:
4228
                        gen_op_fstl_ST0_A0();
4229
                        break;
4230
                    case 3:
4231
                    default:
4232
                        gen_op_fist_ST0_A0();
4233
                        break;
4234
                    }
4235
                    if ((op & 7) == 3)
4236
                        gen_op_fpop();
4237
                    break;
4238
                }
4239
                break;
4240
            case 0x0c: /* fldenv mem */
4241
                gen_op_fldenv_A0(s->dflag);
4242
                break;
4243
            case 0x0d: /* fldcw mem */
4244
                gen_op_fldcw_A0();
4245
                break;
4246
            case 0x0e: /* fnstenv mem */
4247
                gen_op_fnstenv_A0(s->dflag);
4248
                break;
4249
            case 0x0f: /* fnstcw mem */
4250
                gen_op_fnstcw_A0();
4251
                break;
4252
            case 0x1d: /* fldt mem */
4253
                gen_op_fldt_ST0_A0();
4254
                break;
4255
            case 0x1f: /* fstpt mem */
4256
                gen_op_fstt_ST0_A0();
4257
                gen_op_fpop();
4258
                break;
4259
            case 0x2c: /* frstor mem */
4260
                gen_op_frstor_A0(s->dflag);
4261
                break;
4262
            case 0x2e: /* fnsave mem */
4263
                gen_op_fnsave_A0(s->dflag);
4264
                break;
4265
            case 0x2f: /* fnstsw mem */
4266
                gen_op_fnstsw_A0();
4267
                break;
4268
            case 0x3c: /* fbld */
4269
                gen_op_fbld_ST0_A0();
4270
                break;
4271
            case 0x3e: /* fbstp */
4272
                gen_op_fbst_ST0_A0();
4273
                gen_op_fpop();
4274
                break;
4275
            case 0x3d: /* fildll */
4276
                gen_op_fildll_ST0_A0();
4277
                break;
4278
            case 0x3f: /* fistpll */
4279
                gen_op_fistll_ST0_A0();
4280
                gen_op_fpop();
4281
                break;
4282
            default:
4283
                goto illegal_op;
4284
            }
4285
        } else {
4286
            /* register float ops */
4287
            opreg = rm;
4288

    
4289
            switch(op) {
4290
            case 0x08: /* fld sti */
4291
                gen_op_fpush();
4292
                gen_op_fmov_ST0_STN((opreg + 1) & 7);
4293
                break;
4294
            case 0x09: /* fxchg sti */
4295
            case 0x29: /* fxchg4 sti, undocumented op */
4296
            case 0x39: /* fxchg7 sti, undocumented op */
4297
                gen_op_fxchg_ST0_STN(opreg);
4298
                break;
4299
            case 0x0a: /* grp d9/2 */
4300
                switch(rm) {
4301
                case 0: /* fnop */
4302
                    /* check exceptions (FreeBSD FPU probe) */
4303
                    if (s->cc_op != CC_OP_DYNAMIC)
4304
                        gen_op_set_cc_op(s->cc_op);
4305
                    gen_jmp_im(pc_start - s->cs_base);
4306
                    gen_op_fwait();
4307
                    break;
4308
                default:
4309
                    goto illegal_op;
4310
                }
4311
                break;
4312
            case 0x0c: /* grp d9/4 */
4313
                switch(rm) {
4314
                case 0: /* fchs */
4315
                    gen_op_fchs_ST0();
4316
                    break;
4317
                case 1: /* fabs */
4318
                    gen_op_fabs_ST0();
4319
                    break;
4320
                case 4: /* ftst */
4321
                    gen_op_fldz_FT0();
4322
                    gen_op_fcom_ST0_FT0();
4323
                    break;
4324
                case 5: /* fxam */
4325
                    gen_op_fxam_ST0();
4326
                    break;
4327
                default:
4328
                    goto illegal_op;
4329
                }
4330
                break;
4331
            case 0x0d: /* grp d9/5 */
4332
                {
4333
                    switch(rm) {
4334
                    case 0:
4335
                        gen_op_fpush();
4336
                        gen_op_fld1_ST0();
4337
                        break;
4338
                    case 1:
4339
                        gen_op_fpush();
4340
                        gen_op_fldl2t_ST0();
4341
                        break;
4342
                    case 2:
4343
                        gen_op_fpush();
4344
                        gen_op_fldl2e_ST0();
4345
                        break;
4346
                    case 3:
4347
                        gen_op_fpush();
4348
                        gen_op_fldpi_ST0();
4349
                        break;
4350
                    case 4:
4351
                        gen_op_fpush();
4352
                        gen_op_fldlg2_ST0();
4353
                        break;
4354
                    case 5:
4355
                        gen_op_fpush();
4356
                        gen_op_fldln2_ST0();
4357
                        break;
4358
                    case 6:
4359
                        gen_op_fpush();
4360
                        gen_op_fldz_ST0();
4361
                        break;
4362
                    default:
4363
                        goto illegal_op;
4364
                    }
4365
                }
4366
                break;
4367
            case 0x0e: /* grp d9/6 */
4368
                switch(rm) {
4369
                case 0: /* f2xm1 */
4370
                    gen_op_f2xm1();
4371
                    break;
4372
                case 1: /* fyl2x */
4373
                    gen_op_fyl2x();
4374
                    break;
4375
                case 2: /* fptan */
4376
                    gen_op_fptan();
4377
                    break;
4378
                case 3: /* fpatan */
4379
                    gen_op_fpatan();
4380
                    break;
4381
                case 4: /* fxtract */
4382
                    gen_op_fxtract();
4383
                    break;
4384
                case 5: /* fprem1 */
4385
                    gen_op_fprem1();
4386
                    break;
4387
                case 6: /* fdecstp */
4388
                    gen_op_fdecstp();
4389
                    break;
4390
                default:
4391
                case 7: /* fincstp */
4392
                    gen_op_fincstp();
4393
                    break;
4394
                }
4395
                break;
4396
            case 0x0f: /* grp d9/7 */
4397
                switch(rm) {
4398
                case 0: /* fprem */
4399
                    gen_op_fprem();
4400
                    break;
4401
                case 1: /* fyl2xp1 */
4402
                    gen_op_fyl2xp1();
4403
                    break;
4404
                case 2: /* fsqrt */
4405
                    gen_op_fsqrt();
4406
                    break;
4407
                case 3: /* fsincos */
4408
                    gen_op_fsincos();
4409
                    break;
4410
                case 5: /* fscale */
4411
                    gen_op_fscale();
4412
                    break;
4413
                case 4: /* frndint */
4414
                    gen_op_frndint();
4415
                    break;
4416
                case 6: /* fsin */
4417
                    gen_op_fsin();
4418
                    break;
4419
                default:
4420
                case 7: /* fcos */
4421
                    gen_op_fcos();
4422
                    break;
4423
                }
4424
                break;
4425
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4426
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4427
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4428
                {
4429
                    int op1;
4430
                    
4431
                    op1 = op & 7;
4432
                    if (op >= 0x20) {
4433
                        gen_op_fp_arith_STN_ST0[op1](opreg);
4434
                        if (op >= 0x30)
4435
                            gen_op_fpop();
4436
                    } else {
4437
                        gen_op_fmov_FT0_STN(opreg);
4438
                        gen_op_fp_arith_ST0_FT0[op1]();
4439
                    }
4440
                }
4441
                break;
4442
            case 0x02: /* fcom */
4443
            case 0x22: /* fcom2, undocumented op */
4444
                gen_op_fmov_FT0_STN(opreg);
4445
                gen_op_fcom_ST0_FT0();
4446
                break;
4447
            case 0x03: /* fcomp */
4448
            case 0x23: /* fcomp3, undocumented op */
4449
            case 0x32: /* fcomp5, undocumented op */
4450
                gen_op_fmov_FT0_STN(opreg);
4451
                gen_op_fcom_ST0_FT0();
4452
                gen_op_fpop();
4453
                break;
4454
            case 0x15: /* da/5 */
4455
                switch(rm) {
4456
                case 1: /* fucompp */
4457
                    gen_op_fmov_FT0_STN(1);
4458
                    gen_op_fucom_ST0_FT0();
4459
                    gen_op_fpop();
4460
                    gen_op_fpop();
4461
                    break;
4462
                default:
4463
                    goto illegal_op;
4464
                }
4465
                break;
4466
            case 0x1c:
4467
                switch(rm) {
4468
                case 0: /* feni (287 only, just do nop here) */
4469
                    break;
4470
                case 1: /* fdisi (287 only, just do nop here) */
4471
                    break;
4472
                case 2: /* fclex */
4473
                    gen_op_fclex();
4474
                    break;
4475
                case 3: /* fninit */
4476
                    gen_op_fninit();
4477
                    break;
4478
                case 4: /* fsetpm (287 only, just do nop here) */
4479
                    break;
4480
                default:
4481
                    goto illegal_op;
4482
                }
4483
                break;
4484
            case 0x1d: /* fucomi */
4485
                if (s->cc_op != CC_OP_DYNAMIC)
4486
                    gen_op_set_cc_op(s->cc_op);
4487
                gen_op_fmov_FT0_STN(opreg);
4488
                gen_op_fucomi_ST0_FT0();
4489
                s->cc_op = CC_OP_EFLAGS;
4490
                break;
4491
            case 0x1e: /* fcomi */
4492
                if (s->cc_op != CC_OP_DYNAMIC)
4493
                    gen_op_set_cc_op(s->cc_op);
4494
                gen_op_fmov_FT0_STN(opreg);
4495
                gen_op_fcomi_ST0_FT0();
4496
                s->cc_op = CC_OP_EFLAGS;
4497
                break;
4498
            case 0x28: /* ffree sti */
4499
                gen_op_ffree_STN(opreg);
4500
                break; 
4501
            case 0x2a: /* fst sti */
4502
                gen_op_fmov_STN_ST0(opreg);
4503
                break;
4504
            case 0x2b: /* fstp sti */
4505
            case 0x0b: /* fstp1 sti, undocumented op */
4506
            case 0x3a: /* fstp8 sti, undocumented op */
4507
            case 0x3b: /* fstp9 sti, undocumented op */
4508
                gen_op_fmov_STN_ST0(opreg);
4509
                gen_op_fpop();
4510
                break;
4511
            case 0x2c: /* fucom st(i) */
4512
                gen_op_fmov_FT0_STN(opreg);
4513
                gen_op_fucom_ST0_FT0();
4514
                break;
4515
            case 0x2d: /* fucomp st(i) */
4516
                gen_op_fmov_FT0_STN(opreg);
4517
                gen_op_fucom_ST0_FT0();
4518
                gen_op_fpop();
4519
                break;
4520
            case 0x33: /* de/3 */
4521
                switch(rm) {
4522
                case 1: /* fcompp */
4523
                    gen_op_fmov_FT0_STN(1);
4524
                    gen_op_fcom_ST0_FT0();
4525
                    gen_op_fpop();
4526
                    gen_op_fpop();
4527
                    break;
4528
                default:
4529
                    goto illegal_op;
4530
                }
4531
                break;
4532
            case 0x38: /* ffreep sti, undocumented op */
4533
                gen_op_ffree_STN(opreg);
4534
                gen_op_fpop();
4535
                break;
4536
            case 0x3c: /* df/4 */
4537
                switch(rm) {
4538
                case 0:
4539
                    gen_op_fnstsw_EAX();
4540
                    break;
4541
                default:
4542
                    goto illegal_op;
4543
                }
4544
                break;
4545
            case 0x3d: /* fucomip */
4546
                if (s->cc_op != CC_OP_DYNAMIC)
4547
                    gen_op_set_cc_op(s->cc_op);
4548
                gen_op_fmov_FT0_STN(opreg);
4549
                gen_op_fucomi_ST0_FT0();
4550
                gen_op_fpop();
4551
                s->cc_op = CC_OP_EFLAGS;
4552
                break;
4553
            case 0x3e: /* fcomip */
4554
                if (s->cc_op != CC_OP_DYNAMIC)
4555
                    gen_op_set_cc_op(s->cc_op);
4556
                gen_op_fmov_FT0_STN(opreg);
4557
                gen_op_fcomi_ST0_FT0();
4558
                gen_op_fpop();
4559
                s->cc_op = CC_OP_EFLAGS;
4560
                break;
4561
            case 0x10 ... 0x13: /* fcmovxx */
4562
            case 0x18 ... 0x1b:
4563
                {
4564
                    int op1;
4565
                    const static uint8_t fcmov_cc[8] = {
4566
                        (JCC_B << 1),
4567
                        (JCC_Z << 1),
4568
                        (JCC_BE << 1),
4569
                        (JCC_P << 1),
4570
                    };
4571
                    op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
4572
                    gen_setcc(s, op1);
4573
                    gen_op_fcmov_ST0_STN_T0(opreg);
4574
                }
4575
                break;
4576
            default:
4577
                goto illegal_op;
4578
            }
4579
        }
4580
#ifdef USE_CODE_COPY
4581
        s->tb->cflags |= CF_TB_FP_USED;
4582
#endif
4583
        break;
4584
        /************************/
4585
        /* string ops */
4586

    
4587
    case 0xa4: /* movsS */
4588
    case 0xa5:
4589
        if ((b & 1) == 0)
4590
            ot = OT_BYTE;
4591
        else
4592
            ot = dflag + OT_WORD;
4593

    
4594
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4595
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4596
        } else {
4597
            gen_movs(s, ot);
4598
        }
4599
        break;
4600
        
4601
    case 0xaa: /* stosS */
4602
    case 0xab:
4603
        if ((b & 1) == 0)
4604
            ot = OT_BYTE;
4605
        else
4606
            ot = dflag + OT_WORD;
4607

    
4608
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4609
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4610
        } else {
4611
            gen_stos(s, ot);
4612
        }
4613
        break;
4614
    case 0xac: /* lodsS */
4615
    case 0xad:
4616
        if ((b & 1) == 0)
4617
            ot = OT_BYTE;
4618
        else
4619
            ot = dflag + OT_WORD;
4620
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4621
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4622
        } else {
4623
            gen_lods(s, ot);
4624
        }
4625
        break;
4626
    case 0xae: /* scasS */
4627
    case 0xaf:
4628
        if ((b & 1) == 0)
4629
            ot = OT_BYTE;
4630
        else
4631
            ot = dflag + OT_WORD;
4632
        if (prefixes & PREFIX_REPNZ) {
4633
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4634
        } else if (prefixes & PREFIX_REPZ) {
4635
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4636
        } else {
4637
            gen_scas(s, ot);
4638
            s->cc_op = CC_OP_SUBB + ot;
4639
        }
4640
        break;
4641

    
4642
    case 0xa6: /* cmpsS */
4643
    case 0xa7:
4644
        if ((b & 1) == 0)
4645
            ot = OT_BYTE;
4646
        else
4647
            ot = dflag + OT_WORD;
4648
        if (prefixes & PREFIX_REPNZ) {
4649
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4650
        } else if (prefixes & PREFIX_REPZ) {
4651
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4652
        } else {
4653
            gen_cmps(s, ot);
4654
            s->cc_op = CC_OP_SUBB + ot;
4655
        }
4656
        break;
4657
    case 0x6c: /* insS */
4658
    case 0x6d:
4659
        if ((b & 1) == 0)
4660
            ot = OT_BYTE;
4661
        else
4662
            ot = dflag ? OT_LONG : OT_WORD;
4663
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
4664
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4665
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4666
        } else {
4667
            gen_ins(s, ot);
4668
        }
4669
        break;
4670
    case 0x6e: /* outsS */
4671
    case 0x6f:
4672
        if ((b & 1) == 0)
4673
            ot = OT_BYTE;
4674
        else
4675
            ot = dflag ? OT_LONG : OT_WORD;
4676
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
4677
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4678
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4679
        } else {
4680
            gen_outs(s, ot);
4681
        }
4682
        break;
4683

    
4684
        /************************/
4685
        /* port I/O */
4686
    case 0xe4:
4687
    case 0xe5:
4688
        if ((b & 1) == 0)
4689
            ot = OT_BYTE;
4690
        else
4691
            ot = dflag ? OT_LONG : OT_WORD;
4692
        val = ldub_code(s->pc++);
4693
        gen_op_movl_T0_im(val);
4694
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
4695
        gen_op_in[ot]();
4696
        gen_op_mov_reg_T1[ot][R_EAX]();
4697
        break;
4698
    case 0xe6:
4699
    case 0xe7:
4700
        if ((b & 1) == 0)
4701
            ot = OT_BYTE;
4702
        else
4703
            ot = dflag ? OT_LONG : OT_WORD;
4704
        val = ldub_code(s->pc++);
4705
        gen_op_movl_T0_im(val);
4706
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
4707
        gen_op_mov_TN_reg[ot][1][R_EAX]();
4708
        gen_op_out[ot]();
4709
        break;
4710
    case 0xec:
4711
    case 0xed:
4712
        if ((b & 1) == 0)
4713
            ot = OT_BYTE;
4714
        else
4715
            ot = dflag ? OT_LONG : OT_WORD;
4716
        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4717
        gen_op_andl_T0_ffff();
4718
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
4719
        gen_op_in[ot]();
4720
        gen_op_mov_reg_T1[ot][R_EAX]();
4721
        break;
4722
    case 0xee:
4723
    case 0xef:
4724
        if ((b & 1) == 0)
4725
            ot = OT_BYTE;
4726
        else
4727
            ot = dflag ? OT_LONG : OT_WORD;
4728
        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4729
        gen_op_andl_T0_ffff();
4730
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
4731
        gen_op_mov_TN_reg[ot][1][R_EAX]();
4732
        gen_op_out[ot]();
4733
        break;
4734

    
4735
        /************************/
4736
        /* control */
4737
    case 0xc2: /* ret im */
4738
        val = ldsw_code(s->pc);
4739
        s->pc += 2;
4740
        gen_pop_T0(s);
4741
        gen_stack_update(s, val + (2 << s->dflag));
4742
        if (s->dflag == 0)
4743
            gen_op_andl_T0_ffff();
4744
        gen_op_jmp_T0();
4745
        gen_eob(s);
4746
        break;
4747
    case 0xc3: /* ret */
4748
        gen_pop_T0(s);
4749
        gen_pop_update(s);
4750
        if (s->dflag == 0)
4751
            gen_op_andl_T0_ffff();
4752
        gen_op_jmp_T0();
4753
        gen_eob(s);
4754
        break;
4755
    case 0xca: /* lret im */
4756
        val = ldsw_code(s->pc);
4757
        s->pc += 2;
4758
    do_lret:
4759
        if (s->pe && !s->vm86) {
4760
            if (s->cc_op != CC_OP_DYNAMIC)
4761
                gen_op_set_cc_op(s->cc_op);
4762
            gen_jmp_im(pc_start - s->cs_base);
4763
            gen_op_lret_protected(s->dflag, val);
4764
        } else {
4765
            gen_stack_A0(s);
4766
            /* pop offset */
4767
            gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
4768
            if (s->dflag == 0)
4769
                gen_op_andl_T0_ffff();
4770
            /* NOTE: keeping EIP updated is not a problem in case of
4771
               exception */
4772
            gen_op_jmp_T0();
4773
            /* pop selector */
4774
            gen_op_addl_A0_im(2 << s->dflag);
4775
            gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
4776
            gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
4777
            /* add stack offset */
4778
            gen_stack_update(s, val + (4 << s->dflag));
4779
        }
4780
        gen_eob(s);
4781
        break;
4782
    case 0xcb: /* lret */
4783
        val = 0;
4784
        goto do_lret;
4785
    case 0xcf: /* iret */
4786
        if (!s->pe) {
4787
            /* real mode */
4788
            gen_op_iret_real(s->dflag);
4789
            s->cc_op = CC_OP_EFLAGS;
4790
        } else if (s->vm86) {
4791
            if (s->iopl != 3) {
4792
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
4793
            } else {
4794
                gen_op_iret_real(s->dflag);
4795
                s->cc_op = CC_OP_EFLAGS;
4796
            }
4797
        } else {
4798
            if (s->cc_op != CC_OP_DYNAMIC)
4799
                gen_op_set_cc_op(s->cc_op);
4800
            gen_jmp_im(pc_start - s->cs_base);
4801
            gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
4802
            s->cc_op = CC_OP_EFLAGS;
4803
        }
4804
        gen_eob(s);
4805
        break;
4806
    case 0xe8: /* call im */
4807
        {
4808
            if (dflag)
4809
                tval = (int32_t)insn_get(s, OT_LONG);
4810
            else
4811
                tval = (int16_t)insn_get(s, OT_WORD);
4812
            next_eip = s->pc - s->cs_base;
4813
            tval += next_eip;
4814
            if (s->dflag == 0)
4815
                tval &= 0xffff;
4816
            gen_movtl_T0_im(next_eip);
4817
            gen_push_T0(s);
4818
            gen_jmp(s, tval);
4819
        }
4820
        break;
4821
    case 0x9a: /* lcall im */
4822
        {
4823
            unsigned int selector, offset;
4824
            
4825
            if (CODE64(s))
4826
                goto illegal_op;
4827
            ot = dflag ? OT_LONG : OT_WORD;
4828
            offset = insn_get(s, ot);
4829
            selector = insn_get(s, OT_WORD);
4830
            
4831
            gen_op_movl_T0_im(selector);
4832
            gen_op_movl_T1_imu(offset);
4833
        }
4834
        goto do_lcall;
4835
    case 0xe9: /* jmp */
4836
        if (dflag)
4837
            tval = (int32_t)insn_get(s, OT_LONG);
4838
        else
4839
            tval = (int16_t)insn_get(s, OT_WORD);
4840
        tval += s->pc - s->cs_base;
4841
        if (s->dflag == 0)
4842
            tval &= 0xffff;
4843
        gen_jmp(s, tval);
4844
        break;
4845
    case 0xea: /* ljmp im */
4846
        {
4847
            unsigned int selector, offset;
4848

    
4849
            if (CODE64(s))
4850
                goto illegal_op;
4851
            ot = dflag ? OT_LONG : OT_WORD;
            offset = insn_get(s, ot);
            selector = insn_get(s, OT_WORD);

            gen_op_movl_T0_im(selector);
            gen_op_movl_T1_imu(offset);
        }
        goto do_ljmp;
    case 0xeb: /* jmp Jb */
        tval = (int8_t)insn_get(s, OT_BYTE);
        tval += s->pc - s->cs_base;
        if (s->dflag == 0)
            tval &= 0xffff;
        gen_jmp(s, tval);
        break;
    case 0x70 ... 0x7f: /* jcc Jb */
        tval = (int8_t)insn_get(s, OT_BYTE);
        goto do_jcc;
    case 0x180 ... 0x18f: /* jcc Jv */
        if (dflag) {
            tval = (int32_t)insn_get(s, OT_LONG);
        } else {
            tval = (int16_t)insn_get(s, OT_WORD);
        }
    do_jcc:
        next_eip = s->pc - s->cs_base;
        tval += next_eip;
        if (s->dflag == 0)
            tval &= 0xffff;
        gen_jcc(s, b, tval, next_eip);
        break;

    case 0x190 ... 0x19f: /* setcc Gv */
        modrm = ldub_code(s->pc++);
        gen_setcc(s, b);
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
        break;
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        gen_setcc(s, b);
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T1_A0[ot + s->mem_index]();
        } else {
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_TN_reg[ot][1][rm]();
        }
        gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
        break;

        /************************/
        /* flags */
    case 0x9c: /* pushf */
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_op_movl_T0_eflags();
            gen_push_T0(s);
        }
        break;
    case 0x9d: /* popf */
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_pop_T0(s);
            if (s->cpl == 0) {
                if (s->dflag) {
                    gen_op_movl_eflags_T0_cpl0();
                } else {
                    gen_op_movw_eflags_T0_cpl0();
                }
            } else {
                if (s->cpl <= s->iopl) {
                    if (s->dflag) {
                        gen_op_movl_eflags_T0_io();
                    } else {
                        gen_op_movw_eflags_T0_io();
                    }
                } else {
                    if (s->dflag) {
                        gen_op_movl_eflags_T0();
                    } else {
                        gen_op_movw_eflags_T0();
                    }
                }
            }
            gen_pop_update(s);
            s->cc_op = CC_OP_EFLAGS;
            /* abort translation because TF flag may change */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    case 0x9e: /* sahf */
        if (CODE64(s))
            goto illegal_op;
        gen_op_mov_TN_reg[OT_BYTE][0][R_AH]();
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_movb_eflags_T0();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x9f: /* lahf */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_movl_T0_eflags();
        gen_op_mov_reg_T0[OT_BYTE][R_AH]();
        break;
    case 0xf5: /* cmc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_cmc();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xf8: /* clc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_clc();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xf9: /* stc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_stc();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xfc: /* cld */
        gen_op_cld();
        break;
    case 0xfd: /* std */
        gen_op_std();
        break;

        /************************/
        /* bit operations */
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        op = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        if (mod != 3) {
            s->rip_offset = 1;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0[ot + s->mem_index]();
        } else {
            gen_op_mov_TN_reg[ot][0][rm]();
        }
        /* load shift */
        val = ldub_code(s->pc++);
        gen_op_movl_T1_im(val);
        if (op < 4)
            goto illegal_op;
        op -= 4;
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
        s->cc_op = CC_OP_SARB + ot;
        if (op != 0) {
            if (mod != 3)
                gen_op_st_T0_A0[ot + s->mem_index]();
            else
                gen_op_mov_reg_T0[ot][rm]();
            gen_op_update_bt_cc();
        }
        break;
    case 0x1a3: /* bt Gv, Ev */
        op = 0;
        goto do_btx;
    case 0x1ab: /* bts */
        op = 1;
        goto do_btx;
    case 0x1b3: /* btr */
        op = 2;
        goto do_btx;
    case 0x1bb: /* btc */
        op = 3;
    do_btx:
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg[OT_LONG][1][reg]();
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            /* specific case: we need to add a displacement */
            gen_op_add_bit_A0_T1[ot - OT_WORD]();
            gen_op_ld_T0_A0[ot + s->mem_index]();
        } else {
            gen_op_mov_TN_reg[ot][0][rm]();
        }
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
        s->cc_op = CC_OP_SARB + ot;
        if (op != 0) {
            if (mod != 3)
                gen_op_st_T0_A0[ot + s->mem_index]();
            else
                gen_op_mov_reg_T0[ot][rm]();
            gen_op_update_bt_cc();
        }
        break;
    case 0x1bc: /* bsf */
    case 0x1bd: /* bsr */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        /* NOTE: in order to handle the 0 case, we must load the
           result. It could be optimized with a generated jump */
        gen_op_mov_TN_reg[ot][1][reg]();
        gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
        gen_op_mov_reg_T1[ot][reg]();
        s->cc_op = CC_OP_LOGICB + ot;
        break;
        /************************/
        /* bcd */
    case 0x27: /* daa */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_daa();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x2f: /* das */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_das();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x37: /* aaa */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_aaa();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x3f: /* aas */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_aas();
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xd4: /* aam */
        if (CODE64(s))
            goto illegal_op;
        val = ldub_code(s->pc++);
        gen_op_aam(val);
        s->cc_op = CC_OP_LOGICB;
        break;
    case 0xd5: /* aad */
        if (CODE64(s))
            goto illegal_op;
        val = ldub_code(s->pc++);
        gen_op_aad(val);
        s->cc_op = CC_OP_LOGICB;
        break;
        /************************/
        /* misc */
    case 0x90: /* nop */
        /* XXX: xchg + rex handling */
        /* XXX: correct lock test for all insn */
        if (prefixes & PREFIX_LOCK)
            goto illegal_op;
        break;
    case 0x9b: /* fwait */
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
            (HF_MP_MASK | HF_TS_MASK)) {
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            gen_op_fwait();
        }
        break;
    case 0xcc: /* int3 */
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
        break;
    case 0xcd: /* int N */
        val = ldub_code(s->pc++);
        if (s->vm86 && s->iopl != 3) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
        }
        break;
    case 0xce: /* into */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_into(s->pc - pc_start);
        break;
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
        gen_debug(s, pc_start - s->cs_base);
        break;
    case 0xfa: /* cli */
        if (!s->vm86) {
            if (s->cpl <= s->iopl) {
                gen_op_cli();
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        } else {
            if (s->iopl == 3) {
                gen_op_cli();
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        }
        break;
    case 0xfb: /* sti */
        if (!s->vm86) {
            if (s->cpl <= s->iopl) {
            gen_sti:
                gen_op_sti();
                /* interrupts are enabled only after the first insn following sti */
                /* if several consecutive instructions inhibit irqs, only
                   the _first_ one sets the inhibit flag */
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                    gen_op_set_inhibit_irq();
                /* give a chance to handle pending irqs */
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        } else {
            if (s->iopl == 3) {
                goto gen_sti;
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        }
        break;
    case 0x62: /* bound */
        if (CODE64(s))
            goto illegal_op;
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_op_mov_TN_reg[ot][0][reg]();
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_jmp_im(pc_start - s->cs_base);
        if (ot == OT_WORD)
            gen_op_boundw();
        else
            gen_op_boundl();
        break;
    case 0x1c8 ... 0x1cf: /* bswap reg */
        reg = (b & 7) | REX_B(s);
#ifdef TARGET_X86_64
        if (dflag == 2) {
            gen_op_mov_TN_reg[OT_QUAD][0][reg]();
            gen_op_bswapq_T0();
            gen_op_mov_reg_T0[OT_QUAD][reg]();
        } else
#endif
        {
            gen_op_mov_TN_reg[OT_LONG][0][reg]();
            gen_op_bswapl_T0();
            gen_op_mov_reg_T0[OT_LONG][reg]();
        }
        break;
    case 0xd6: /* salc */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_op_salc();
        break;
    case 0xe0: /* loopnz */
    case 0xe1: /* loopz */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        /* FALL THRU */
    case 0xe2: /* loop */
    case 0xe3: /* jecxz */
        {
            int l1, l2;

            tval = (int8_t)insn_get(s, OT_BYTE);
            next_eip = s->pc - s->cs_base;
            tval += next_eip;
            if (s->dflag == 0)
                tval &= 0xffff;

            l1 = gen_new_label();
            l2 = gen_new_label();
            b &= 3;
            if (b == 3) {
                gen_op_jz_ecx[s->aflag](l1);
            } else {
                gen_op_dec_ECX[s->aflag]();
                gen_op_loop[s->aflag][b](l1);
            }

            gen_jmp_im(next_eip);
            gen_op_jmp_label(l2);
            gen_set_label(l1);
            gen_jmp_im(tval);
            gen_set_label(l2);
            gen_eob(s);
        }
        break;
    case 0x130: /* wrmsr */
    case 0x132: /* rdmsr */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (b & 2)
                gen_op_rdmsr();
            else
                gen_op_wrmsr();
        }
        break;
    case 0x131: /* rdtsc */
        gen_op_rdtsc();
        break;
    case 0x134: /* sysenter */
        if (CODE64(s))
            goto illegal_op;
        if (!s->pe) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            gen_op_sysenter();
            gen_eob(s);
        }
        break;
    case 0x135: /* sysexit */
        if (CODE64(s))
            goto illegal_op;
        if (!s->pe) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            gen_op_sysexit();
            gen_eob(s);
        }
        break;
#ifdef TARGET_X86_64
    case 0x105: /* syscall */
        /* XXX: is it usable in real mode ? */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_syscall(s->pc - pc_start);
        gen_eob(s);
        break;
    case 0x107: /* sysret */
        if (!s->pe) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            gen_op_sysret(s->dflag);
            gen_eob(s);
        }
        break;
#endif
    case 0x1a2: /* cpuid */
        gen_op_cpuid();
        break;
    case 0xf4: /* hlt */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(s->pc - s->cs_base);
            gen_op_hlt();
            s->is_jmp = 3;
        }
        break;
    case 0x100:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* sldt */
            if (!s->pe || s->vm86)
                goto illegal_op;
            gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
            ot = OT_WORD;
            if (mod == 3)
                ot += s->dflag;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            break;
        case 2: /* lldt */
            if (!s->pe || s->vm86)
                goto illegal_op;
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                gen_jmp_im(pc_start - s->cs_base);
                gen_op_lldt_T0();
            }
            break;
        case 1: /* str */
            if (!s->pe || s->vm86)
                goto illegal_op;
            gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
            ot = OT_WORD;
            if (mod == 3)
                ot += s->dflag;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            break;
        case 3: /* ltr */
            if (!s->pe || s->vm86)
                goto illegal_op;
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                gen_jmp_im(pc_start - s->cs_base);
                gen_op_ltr_T0();
            }
            break;
        case 4: /* verr */
        case 5: /* verw */
            if (!s->pe || s->vm86)
                goto illegal_op;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            if (op == 4)
                gen_op_verr();
            else
                gen_op_verw();
            s->cc_op = CC_OP_EFLAGS;
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x101:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* sgdt */
        case 1: /* sidt */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (op == 0)
                gen_op_movl_T0_env(offsetof(CPUX86State,gdt.limit));
            else
                gen_op_movl_T0_env(offsetof(CPUX86State,idt.limit));
            gen_op_st_T0_A0[OT_WORD + s->mem_index]();
#ifdef TARGET_X86_64
            if (CODE64(s))
                gen_op_addq_A0_im(2);
            else
#endif
                gen_op_addl_A0_im(2);
            if (op == 0)
                gen_op_movtl_T0_env(offsetof(CPUX86State,gdt.base));
            else
                gen_op_movtl_T0_env(offsetof(CPUX86State,idt.base));
            if (!s->dflag)
                gen_op_andl_T0_im(0xffffff);
            gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
            break;
        case 2: /* lgdt */
        case 3: /* lidt */
            if (mod == 3)
                goto illegal_op;
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T1_A0[OT_WORD + s->mem_index]();
#ifdef TARGET_X86_64
                if (CODE64(s))
                    gen_op_addq_A0_im(2);
                else
#endif
                    gen_op_addl_A0_im(2);
                gen_op_ld_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
                if (!s->dflag)
                    gen_op_andl_T0_im(0xffffff);
                if (op == 2) {
                    gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
                    gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
                } else {
                    gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
                    gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
                }
            }
            break;
        case 4: /* smsw */
            gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
            break;
        case 6: /* lmsw */
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                gen_op_lmsw_T0();
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            }
            break;
        case 7: /* invlpg */
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                if (mod == 3) {
#ifdef TARGET_X86_64
                    if (CODE64(s) && (modrm & 7) == 0) {
                        /* swapgs */
                        gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
                        gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
                        gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
                        gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
                    } else
#endif
                    {
                        goto illegal_op;
                    }
                } else {
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_op_invlpg_A0();
                    gen_jmp_im(s->pc - s->cs_base);
                    gen_eob(s);
                }
            }
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x108: /* invd */
    case 0x109: /* wbinvd */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            /* nothing to do */
        }
        break;
    case 0x63: /* arpl or movslS (x86_64) */
#ifdef TARGET_X86_64
        if (CODE64(s)) {
            int d_ot;
            /* d_ot is the size of destination */
            d_ot = dflag + OT_WORD;

            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);

            if (mod == 3) {
                gen_op_mov_TN_reg[OT_LONG][0][rm]();
                /* sign extend */
                if (d_ot == OT_QUAD)
                    gen_op_movslq_T0_T0();
                gen_op_mov_reg_T0[d_ot][reg]();
            } else {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if (d_ot == OT_QUAD) {
                    gen_op_lds_T0_A0[OT_LONG + s->mem_index]();
                } else {
                    gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                }
                gen_op_mov_reg_T0[d_ot][reg]();
            }
        } else
#endif
        {
            if (!s->pe || s->vm86)
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = (modrm >> 3) & 7;
            mod = (modrm >> 6) & 3;
            rm = modrm & 7;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0[ot + s->mem_index]();
            } else {
                gen_op_mov_TN_reg[ot][0][rm]();
            }
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_op_arpl();
            s->cc_op = CC_OP_EFLAGS;
            if (mod != 3) {
                gen_op_st_T0_A0[ot + s->mem_index]();
            } else {
                gen_op_mov_reg_T0[ot][rm]();
            }
            gen_op_arpl_update();
        }
        break;
    case 0x102: /* lar */
    case 0x103: /* lsl */
        if (!s->pe || s->vm86)
            goto illegal_op;
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_TN_reg[ot][1][reg]();
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        if (b == 0x102)
            gen_op_lar();
        else
            gen_op_lsl();
        s->cc_op = CC_OP_EFLAGS;
        gen_op_mov_reg_T1[ot][reg]();
        break;
    case 0x118:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* prefetchnta */
        case 1: /* prefetcht0 */
        case 2: /* prefetcht1 */
        case 3: /* prefetcht2 */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            /* nothing more to do */
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x120: /* mov reg, crN */
    case 0x122: /* mov crN, reg */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            modrm = ldub_code(s->pc++);
            if ((modrm & 0xc0) != 0xc0)
                goto illegal_op;
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
            if (CODE64(s))
                ot = OT_QUAD;
            else
                ot = OT_LONG;
            switch(reg) {
            case 0:
            case 2:
            case 3:
            case 4:
                if (b & 2) {
                    gen_op_mov_TN_reg[ot][0][rm]();
                    gen_op_movl_crN_T0(reg);
                    gen_jmp_im(s->pc - s->cs_base);
                    gen_eob(s);
                } else {
                    gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
                    gen_op_mov_reg_T0[ot][rm]();
                }
                break;
                /* XXX: add CR8 for x86_64 */
            default:
                goto illegal_op;
            }
        }
        break;
    case 0x121: /* mov reg, drN */
    case 0x123: /* mov drN, reg */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            modrm = ldub_code(s->pc++);
            if ((modrm & 0xc0) != 0xc0)
                goto illegal_op;
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
            if (CODE64(s))
                ot = OT_QUAD;
            else
                ot = OT_LONG;
            /* XXX: do it dynamically with CR4.DE bit */
            if (reg == 4 || reg == 5 || reg >= 8)
                goto illegal_op;
            if (b & 2) {
                gen_op_mov_TN_reg[ot][0][rm]();
                gen_op_movl_drN_T0(reg);
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            } else {
                gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
                gen_op_mov_reg_T0[ot][rm]();
            }
        }
        break;
    case 0x106: /* clts */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_op_clts();
            /* abort block because static cpu state changed */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    /* MMX/SSE/SSE2/PNI support */
    case 0x1c3: /* MOVNTI reg, mem */
        if (!(s->cpuid_features & CPUID_SSE2))
            goto illegal_op;
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
        break;
    case 0x1ae:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* fxsave */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR))
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_fxsave_A0((s->dflag == 2));
            break;
        case 1: /* fxrstor */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR))
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_fxrstor_A0((s->dflag == 2));
            break;
        case 2: /* ldmxcsr */
        case 3: /* stmxcsr */
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
                mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (op == 2) {
                gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
            } else {
                gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
                gen_op_st_T0_A0[OT_LONG + s->mem_index]();
            }
            break;
        case 5: /* lfence */
        case 6: /* mfence */
        case 7: /* sfence */
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
                goto illegal_op;
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x110 ... 0x117:
    case 0x128 ... 0x12f:
    case 0x150 ... 0x177:
    case 0x17c ... 0x17f:
    case 0x1c2:
    case 0x1c4 ... 0x1c6:
    case 0x1d0 ... 0x1fe:
        gen_sse(s, b, pc_start, rex_r);
        break;
    default:
        goto illegal_op;
    }
    /* lock generation */
    if (s->prefix & PREFIX_LOCK)
        gen_op_unlock();
    return s->pc;
 illegal_op:
    if (s->prefix & PREFIX_LOCK)
        gen_op_unlock();
    /* XXX: ensure that no lock was generated */
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
    return s->pc;
}

#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)

/* flags read by an operation */
static uint16_t opc_read_flags[NB_OPS] = {
    [INDEX_op_aas] = CC_A,
    [INDEX_op_aaa] = CC_A,
    [INDEX_op_das] = CC_A | CC_C,
    [INDEX_op_daa] = CC_A | CC_C,

    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_C,

    [INDEX_op_into] = CC_O,

    [INDEX_op_jb_subb] = CC_C,
    [INDEX_op_jb_subw] = CC_C,
    [INDEX_op_jb_subl] = CC_C,

    [INDEX_op_jz_subb] = CC_Z,
    [INDEX_op_jz_subw] = CC_Z,
    [INDEX_op_jz_subl] = CC_Z,

    [INDEX_op_jbe_subb] = CC_Z | CC_C,
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
    [INDEX_op_jbe_subl] = CC_Z | CC_C,

    [INDEX_op_js_subb] = CC_S,
    [INDEX_op_js_subw] = CC_S,
    [INDEX_op_js_subl] = CC_S,

    [INDEX_op_jl_subb] = CC_O | CC_S,
    [INDEX_op_jl_subw] = CC_O | CC_S,
    [INDEX_op_jl_subl] = CC_O | CC_S,

    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzw] = CC_Z,
    [INDEX_op_loopnzl] = CC_Z,
    [INDEX_op_loopzw] = CC_Z,
    [INDEX_op_loopzl] = CC_Z,

    [INDEX_op_seto_T0_cc] = CC_O,
    [INDEX_op_setb_T0_cc] = CC_C,
    [INDEX_op_setz_T0_cc] = CC_Z,
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
    [INDEX_op_sets_T0_cc] = CC_S,
    [INDEX_op_setp_T0_cc] = CC_P,
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

    [INDEX_op_setb_T0_subb] = CC_C,
    [INDEX_op_setb_T0_subw] = CC_C,
    [INDEX_op_setb_T0_subl] = CC_C,

    [INDEX_op_setz_T0_subb] = CC_Z,
    [INDEX_op_setz_T0_subw] = CC_Z,
    [INDEX_op_setz_T0_subl] = CC_Z,

    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

    [INDEX_op_sets_T0_subb] = CC_S,
    [INDEX_op_sets_T0_subw] = CC_S,
    [INDEX_op_sets_T0_subl] = CC_S,

    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,

    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
    [INDEX_op_cmc] = CC_C,
    [INDEX_op_salc] = CC_C,

    /* needed for correct flag optimisation before string ops */
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
    [INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
    [INDEX_op_jb_subq] = CC_C,
    [INDEX_op_jz_subq] = CC_Z,
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
    [INDEX_op_js_subq] = CC_S,
    [INDEX_op_jl_subq] = CC_O | CC_S,
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzq] = CC_Z,
    [INDEX_op_loopzq] = CC_Z,

    [INDEX_op_setb_T0_subq] = CC_C,
    [INDEX_op_setz_T0_subq] = CC_Z,
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
    [INDEX_op_sets_T0_subq] = CC_S,
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
#endif

#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)

    DEF_READF( )
    DEF_READF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_READF(_kernel)
    DEF_READF(_user)
#endif
};

/* flags written by an operation */
static uint16_t opc_write_flags[NB_OPS] = {
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    /* sse */
    [INDEX_op_ucomiss] = CC_OSZAPC,
    [INDEX_op_ucomisd] = CC_OSZAPC,
    [INDEX_op_comiss] = CC_OSZAPC,
    [INDEX_op_comisd] = CC_OSZAPC,

    /* bcd */
    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,

#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)


    DEF_WRITEF( )
    DEF_WRITEF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_WRITEF(_kernel)
    DEF_WRITEF(_user)
#endif
};

/* simpler form of an operation if no flags need to be generated */
static uint16_t opc_simpler[NB_OPS] = {
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
#if 0
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
#endif

    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)

#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};

void optimize_flags_init(void)
{
    int i;
    /* put default values in arrays */
    for(i = 0; i < NB_OPS; i++) {
        if (opc_simpler[i] == 0)
            opc_simpler[i] = i;
    }
}

/* CPU flags computation optimization: we move backward thru the
   generated code to see which flags are needed. The operation is
   modified if suitable */
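/* (In other words, a backward liveness scan: starting from "all flags
   live" at the end of the block, an op whose written flags are never
   read afterwards is replaced by its flag-less variant from
   opc_simpler[].) */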
static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
{
    uint16_t *opc_ptr;
    int live_flags, write_flags, op;

    opc_ptr = opc_buf + opc_buf_len;
    /* live_flags contains the flags needed by the next instructions
       in the code. At the end of the block, we consider that all the
       flags are live. */
    live_flags = CC_OSZAPC;
    while (opc_ptr > opc_buf) {
        op = *--opc_ptr;
        /* if none of the flags written by the instruction is used,
           then we can try to find a simpler instruction */
        write_flags = opc_write_flags[op];
        if ((live_flags & write_flags) == 0) {
            *opc_ptr = opc_simpler[op];
        }
        /* compute the live flags before the instruction */
        live_flags &= ~write_flags;
        live_flags |= opc_read_flags[op];
    }
}

/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int flags, j, lj, cflags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
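    /* NOTE: mem_index is assumed to index the softmmu variants of the
       memory access ops (the _raw/_kernel/_user suffixed generators seen
       elsewhere in this file): 0 selects the raw ops, 1*4 the kernel ops
       and 2*4 the user (cpl == 3) ops. */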
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    gen_opc_ptr = gen_opc_buf;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    gen_opparam_ptr = gen_opparam_buf;
    nb_gen_labels = 0;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;

    for(;;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* in single step mode, we generate only one instruction and
           then generate an exception */
        /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP) {
            fprintf(logfile, "OP:\n");
            dump_ops(gen_opc_buf, gen_opparam_buf);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP_OPT) {
        fprintf(logfile, "AFTER FLAGS OPT:\n");
        dump_ops(gen_opc_buf, gen_opparam_buf);
        fprintf(logfile, "\n");
    }
#endif
    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}

int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 0);
}

int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 1);
}