/* target-i386/translate.c — i386 instruction translator (revision 298e01b6) */

/*
 *  i386 translation
 *
 *  Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>
#include <signal.h>
#include <assert.h>

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "helper.h"
#include "tcg-op.h"
34
#define PREFIX_REPZ   0x01
35
#define PREFIX_REPNZ  0x02
36
#define PREFIX_LOCK   0x04
37
#define PREFIX_DATA   0x08
38
#define PREFIX_ADR    0x10
39

    
40
#ifdef TARGET_X86_64
41
#define X86_64_ONLY(x) x
42
#define X86_64_DEF(x...) x
43
#define CODE64(s) ((s)->code64)
44
#define REX_X(s) ((s)->rex_x)
45
#define REX_B(s) ((s)->rex_b)
46
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
47
#if 1
48
#define BUGGY_64(x) NULL
49
#endif
50
#else
51
#define X86_64_ONLY(x) NULL
52
#define X86_64_DEF(x...)
53
#define CODE64(s) 0
54
#define REX_X(s) 0
55
#define REX_B(s) 0
56
#endif
57

    
58
//#define MACRO_TEST   1
59

    
60
/* global register indexes */
61
static TCGv cpu_env, cpu_T[2], cpu_A0;
62
/* local register indexes (only used inside old micro ops) */
63
static TCGv cpu_tmp0;
64

    
65
#ifdef TARGET_X86_64
66
static int x86_64_hregs;
67
#endif
68

    
69
typedef struct DisasContext {
70
    /* current insn context */
71
    int override; /* -1 if no override */
72
    int prefix;
73
    int aflag, dflag;
74
    target_ulong pc; /* pc = eip + cs_base */
75
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
76
                   static state change (stop translation) */
77
    /* current block context */
78
    target_ulong cs_base; /* base of CS segment */
79
    int pe;     /* protected mode */
80
    int code32; /* 32 bit code segment */
81
#ifdef TARGET_X86_64
82
    int lma;    /* long mode active */
83
    int code64; /* 64 bit code segment */
84
    int rex_x, rex_b;
85
#endif
86
    int ss32;   /* 32 bit stack segment */
87
    int cc_op;  /* current CC operation */
88
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
89
    int f_st;   /* currently unused */
90
    int vm86;   /* vm86 mode */
91
    int cpl;
92
    int iopl;
93
    int tf;     /* TF cpu flag */
94
    int singlestep_enabled; /* "hardware" single step enabled */
95
    int jmp_opt; /* use direct block chaining for direct jumps */
96
    int mem_index; /* select memory access functions */
97
    uint64_t flags; /* all execution flags */
98
    struct TranslationBlock *tb;
99
    int popl_esp_hack; /* for correct popl with esp base handling */
100
    int rip_offset; /* only used in x86_64, but left for simplicity */
101
    int cpuid_features;
102
    int cpuid_ext_features;
103
} DisasContext;
104

    
105
static void gen_eob(DisasContext *s);
106
static void gen_jmp(DisasContext *s, target_ulong eip);
107
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
108

    
109
/* i386 arith/logic operations */
110
enum {
111
    OP_ADDL,
112
    OP_ORL,
113
    OP_ADCL,
114
    OP_SBBL,
115
    OP_ANDL,
116
    OP_SUBL,
117
    OP_XORL,
118
    OP_CMPL,
119
};
120

    
121
/* i386 shift ops */
122
enum {
123
    OP_ROL,
124
    OP_ROR,
125
    OP_RCL,
126
    OP_RCR,
127
    OP_SHL,
128
    OP_SHR,
129
    OP_SHL1, /* undocumented */
130
    OP_SAR = 7,
131
};
132

    
133
/* operand size */
134
enum {
135
    OT_BYTE = 0,
136
    OT_WORD,
137
    OT_LONG,
138
    OT_QUAD,
139
};
140

    
141
enum {
142
    /* I386 int registers */
143
    OR_EAX,   /* MUST be even numbered */
144
    OR_ECX,
145
    OR_EDX,
146
    OR_EBX,
147
    OR_ESP,
148
    OR_EBP,
149
    OR_ESI,
150
    OR_EDI,
151

    
152
    OR_TMP0 = 16,    /* temporary operand register */
153
    OR_TMP1,
154
    OR_A0, /* temporary register used when doing address evaluation */
155
};
156

    
157
static inline void gen_op_movl_T0_0(void)
158
{
159
    tcg_gen_movi_tl(cpu_T[0], 0);
160
}
161

    
162
static inline void gen_op_movl_T0_im(int32_t val)
163
{
164
    tcg_gen_movi_tl(cpu_T[0], val);
165
}
166

    
167
static inline void gen_op_movl_T0_imu(uint32_t val)
168
{
169
    tcg_gen_movi_tl(cpu_T[0], val);
170
}
171

    
172
static inline void gen_op_movl_T1_im(int32_t val)
173
{
174
    tcg_gen_movi_tl(cpu_T[1], val);
175
}
176

    
177
static inline void gen_op_movl_T1_imu(uint32_t val)
178
{
179
    tcg_gen_movi_tl(cpu_T[1], val);
180
}
181

    
182
static inline void gen_op_movl_A0_im(uint32_t val)
183
{
184
    tcg_gen_movi_tl(cpu_A0, val);
185
}
186

    
187
#ifdef TARGET_X86_64
188
static inline void gen_op_movq_A0_im(int64_t val)
189
{
190
    tcg_gen_movi_tl(cpu_A0, val);
191
}
192
#endif
193

    
194
static inline void gen_movtl_T0_im(target_ulong val)
195
{
196
    tcg_gen_movi_tl(cpu_T[0], val);
197
}
198

    
199
static inline void gen_movtl_T1_im(target_ulong val)
200
{
201
    tcg_gen_movi_tl(cpu_T[1], val);
202
}
203

    
204
static inline void gen_op_andl_T0_ffff(void)
205
{
206
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
207
}
208

    
209
static inline void gen_op_andl_T0_im(uint32_t val)
210
{
211
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
212
}
213

    
214
static inline void gen_op_movl_T0_T1(void)
215
{
216
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
217
}
218

    
219
static inline void gen_op_andl_A0_ffff(void)
220
{
221
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
222
}
223

    
224
#ifdef TARGET_X86_64

/* number of distinct operand sizes handled (byte/word/long/quad) */
#define NB_OP_SIZES 4

/* expand one table entry per integer register (16 regs in 64-bit mode) */
#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,\
  prefix ## R8 ## suffix,\
  prefix ## R9 ## suffix,\
  prefix ## R10 ## suffix,\
  prefix ## R11 ## suffix,\
  prefix ## R12 ## suffix,\
  prefix ## R13 ## suffix,\
  prefix ## R14 ## suffix,\
  prefix ## R15 ## suffix,

#else /* !TARGET_X86_64 */

/* byte/word/long only */
#define NB_OP_SIZES 3

/* expand one table entry per integer register (8 regs in 32-bit mode) */
#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,

#endif /* !TARGET_X86_64 */

/* byte offsets of the sub-register views within a target_ulong register
   slot, accounting for host endianness */
#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
277
{
278
    switch(ot) {
279
    case OT_BYTE:
280
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
281
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
282
        } else {
283
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
284
        }
285
        break;
286
    case OT_WORD:
287
        tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
288
        break;
289
#ifdef TARGET_X86_64
290
    case OT_LONG:
291
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
292
        /* high part of register set to zero */
293
        tcg_gen_movi_tl(cpu_tmp0, 0);
294
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
295
        break;
296
    default:
297
    case OT_QUAD:
298
        tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
299
        break;
300
#else
301
    default:
302
    case OT_LONG:
303
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
304
        break;
305
#endif
306
    }
307
}
308

    
309
static inline void gen_op_mov_reg_T0(int ot, int reg)
310
{
311
    gen_op_mov_reg_TN(ot, 0, reg);
312
}
313

    
314
static inline void gen_op_mov_reg_T1(int ot, int reg)
315
{
316
    gen_op_mov_reg_TN(ot, 1, reg);
317
}
318

    
319
static inline void gen_op_mov_reg_A0(int size, int reg)
320
{
321
    switch(size) {
322
    case 0:
323
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
324
        break;
325
#ifdef TARGET_X86_64
326
    case 1:
327
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
328
        /* high part of register set to zero */
329
        tcg_gen_movi_tl(cpu_tmp0, 0);
330
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
331
        break;
332
    default:
333
    case 2:
334
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
335
        break;
336
#else
337
    default:
338
    case 1:
339
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
340
        break;
341
#endif
342
    }
343
}
344

    
345
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
346
{
347
    switch(ot) {
348
    case OT_BYTE:
349
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
350
            goto std_case;
351
        } else {
352
            tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
353
        }
354
        break;
355
    default:
356
    std_case:
357
        tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
358
        break;
359
    }
360
}
361

    
362
static inline void gen_op_movl_A0_reg(int reg)
363
{
364
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
365
}
366

    
367
static inline void gen_op_addl_A0_im(int32_t val)
368
{
369
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
370
#ifdef TARGET_X86_64
371
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
372
#endif
373
}
374

    
375
#ifdef TARGET_X86_64
376
static inline void gen_op_addq_A0_im(int64_t val)
377
{
378
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
379
}
380
#endif
381
    
382
static void gen_add_A0_im(DisasContext *s, int val)
383
{
384
#ifdef TARGET_X86_64
385
    if (CODE64(s))
386
        gen_op_addq_A0_im(val);
387
    else
388
#endif
389
        gen_op_addl_A0_im(val);
390
}
391

    
392
static inline void gen_op_addl_T0_T1(void)
393
{
394
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
395
}
396

    
397
static inline void gen_op_jmp_T0(void)
398
{
399
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
400
}
401

    
402
static inline void gen_op_addw_ESP_im(int32_t val)
403
{
404
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
405
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
406
    tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]) + REG_W_OFFSET);
407
}
408

    
409
static inline void gen_op_addl_ESP_im(int32_t val)
410
{
411
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
412
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
413
#ifdef TARGET_X86_64
414
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
415
#endif
416
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
417
}
418

    
419
#ifdef TARGET_X86_64
420
static inline void gen_op_addq_ESP_im(int32_t val)
421
{
422
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
423
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
424
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
425
}
426
#endif
427

    
428
static inline void gen_op_set_cc_op(int32_t val)
429
{
430
    tcg_gen_movi_tl(cpu_tmp0, val);
431
    tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, cc_op));
432
}
433

    
434
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
435
{
436
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
437
    if (shift != 0) 
438
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
439
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
440
#ifdef TARGET_X86_64
441
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
442
#endif
443
}
444

    
445
static inline void gen_op_movl_A0_seg(int reg)
446
{
447
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
448
}
449

    
450
static inline void gen_op_addl_A0_seg(int reg)
451
{
452
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
453
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
454
#ifdef TARGET_X86_64
455
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
456
#endif
457
}
458

    
459
#ifdef TARGET_X86_64
460
static inline void gen_op_movq_A0_seg(int reg)
461
{
462
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
463
}
464

    
465
static inline void gen_op_addq_A0_seg(int reg)
466
{
467
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
468
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
469
}
470

    
471
static inline void gen_op_movq_A0_reg(int reg)
472
{
473
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
474
}
475

    
476
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
477
{
478
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
479
    if (shift != 0) 
480
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
481
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
482
}
483
#endif
484

    
485
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
486
    [0] = {
487
        DEF_REGS(gen_op_cmovw_, _T1_T0)
488
    },
489
    [1] = {
490
        DEF_REGS(gen_op_cmovl_, _T1_T0)
491
    },
492
#ifdef TARGET_X86_64
493
    [2] = {
494
        DEF_REGS(gen_op_cmovq_, _T1_T0)
495
    },
496
#endif
497
};
498

    
499
#define DEF_ARITHC(SUFFIX)\
500
    {\
501
        gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
502
        gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
503
    },\
504
    {\
505
        gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
506
        gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
507
    },\
508
    {\
509
        gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
510
        gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
511
    },\
512
    {\
513
        X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
514
        X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
515
    },
516

    
517
static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
518
    DEF_ARITHC( )
519
};
520

    
521
static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
522
    DEF_ARITHC(_raw)
523
#ifndef CONFIG_USER_ONLY
524
    DEF_ARITHC(_kernel)
525
    DEF_ARITHC(_user)
526
#endif
527
};
528

    
529
static const int cc_op_arithb[8] = {
530
    CC_OP_ADDB,
531
    CC_OP_LOGICB,
532
    CC_OP_ADDB,
533
    CC_OP_SUBB,
534
    CC_OP_LOGICB,
535
    CC_OP_SUBB,
536
    CC_OP_LOGICB,
537
    CC_OP_SUBB,
538
};
539

    
540
#define DEF_CMPXCHG(SUFFIX)\
541
    gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
542
    gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
543
    gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
544
    X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
545

    
546
static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
547
    DEF_CMPXCHG( )
548
};
549

    
550
static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
551
    DEF_CMPXCHG(_raw)
552
#ifndef CONFIG_USER_ONLY
553
    DEF_CMPXCHG(_kernel)
554
    DEF_CMPXCHG(_user)
555
#endif
556
};
557

    
558
#define DEF_SHIFT(SUFFIX)\
559
    {\
560
        gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
561
        gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
562
        gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
563
        gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
564
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
565
        gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
566
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
567
        gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
568
    },\
569
    {\
570
        gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
571
        gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
572
        gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
573
        gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
574
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
575
        gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
576
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
577
        gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
578
    },\
579
    {\
580
        gen_op_roll ## SUFFIX ## _T0_T1_cc,\
581
        gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
582
        gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
583
        gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
584
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
585
        gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
586
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
587
        gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
588
    },\
589
    {\
590
        X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
591
        X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
592
        X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
593
        X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
594
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
595
        X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
596
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
597
        X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
598
    },
599

    
600
static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
601
    DEF_SHIFT( )
602
};
603

    
604
static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
605
    DEF_SHIFT(_raw)
606
#ifndef CONFIG_USER_ONLY
607
    DEF_SHIFT(_kernel)
608
    DEF_SHIFT(_user)
609
#endif
610
};
611

    
612
#define DEF_SHIFTD(SUFFIX, op)\
613
    {\
614
        NULL,\
615
        NULL,\
616
    },\
617
    {\
618
        gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
619
        gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
620
     },\
621
    {\
622
        gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
623
        gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
624
    },\
625
    {\
626
X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
627
           gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
628
    },
629

    
630
static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
631
    DEF_SHIFTD(, im)
632
};
633

    
634
static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
635
    DEF_SHIFTD(, ECX)
636
};
637

    
638
static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
639
    DEF_SHIFTD(_raw, im)
640
#ifndef CONFIG_USER_ONLY
641
    DEF_SHIFTD(_kernel, im)
642
    DEF_SHIFTD(_user, im)
643
#endif
644
};
645

    
646
static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
647
    DEF_SHIFTD(_raw, ECX)
648
#ifndef CONFIG_USER_ONLY
649
    DEF_SHIFTD(_kernel, ECX)
650
    DEF_SHIFTD(_user, ECX)
651
#endif
652
};
653

    
654
static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
655
    [0] = {
656
        gen_op_btw_T0_T1_cc,
657
        gen_op_btsw_T0_T1_cc,
658
        gen_op_btrw_T0_T1_cc,
659
        gen_op_btcw_T0_T1_cc,
660
    },
661
    [1] = {
662
        gen_op_btl_T0_T1_cc,
663
        gen_op_btsl_T0_T1_cc,
664
        gen_op_btrl_T0_T1_cc,
665
        gen_op_btcl_T0_T1_cc,
666
    },
667
#ifdef TARGET_X86_64
668
    [2] = {
669
        gen_op_btq_T0_T1_cc,
670
        gen_op_btsq_T0_T1_cc,
671
        gen_op_btrq_T0_T1_cc,
672
        gen_op_btcq_T0_T1_cc,
673
    },
674
#endif
675
};
676

    
677
static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
678
    gen_op_add_bitw_A0_T1,
679
    gen_op_add_bitl_A0_T1,
680
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
681
};
682

    
683
static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
684
    [0] = {
685
        gen_op_bsfw_T0_cc,
686
        gen_op_bsrw_T0_cc,
687
    },
688
    [1] = {
689
        gen_op_bsfl_T0_cc,
690
        gen_op_bsrl_T0_cc,
691
    },
692
#ifdef TARGET_X86_64
693
    [2] = {
694
        gen_op_bsfq_T0_cc,
695
        gen_op_bsrq_T0_cc,
696
    },
697
#endif
698
};
699

    
700
static inline void gen_op_lds_T0_A0(int idx)
701
{
702
    int mem_index = (idx >> 2) - 1;
703
    switch(idx & 3) {
704
    case 0:
705
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
706
        break;
707
    case 1:
708
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
709
        break;
710
    default:
711
    case 2:
712
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
713
        break;
714
    }
715
}
716

    
717
/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
718
static inline void gen_op_ld_T0_A0(int idx)
719
{
720
    int mem_index = (idx >> 2) - 1;
721
    switch(idx & 3) {
722
    case 0:
723
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
724
        break;
725
    case 1:
726
        tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
727
        break;
728
    case 2:
729
        tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
730
        break;
731
    default:
732
    case 3:
733
        tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
734
        break;
735
    }
736
}
737

    
738
static inline void gen_op_ldu_T0_A0(int idx)
739
{
740
    gen_op_ld_T0_A0(idx);
741
}
742

    
743
static inline void gen_op_ld_T1_A0(int idx)
744
{
745
    int mem_index = (idx >> 2) - 1;
746
    switch(idx & 3) {
747
    case 0:
748
        tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
749
        break;
750
    case 1:
751
        tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
752
        break;
753
    case 2:
754
        tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
755
        break;
756
    default:
757
    case 3:
758
        tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
759
        break;
760
    }
761
}
762

    
763
static inline void gen_op_st_T0_A0(int idx)
764
{
765
    int mem_index = (idx >> 2) - 1;
766
    switch(idx & 3) {
767
    case 0:
768
        tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
769
        break;
770
    case 1:
771
        tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
772
        break;
773
    case 2:
774
        tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
775
        break;
776
    default:
777
    case 3:
778
        tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
779
        break;
780
    }
781
}
782

    
783
static inline void gen_op_st_T1_A0(int idx)
784
{
785
    int mem_index = (idx >> 2) - 1;
786
    switch(idx & 3) {
787
    case 0:
788
        tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
789
        break;
790
    case 1:
791
        tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
792
        break;
793
    case 2:
794
        tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
795
        break;
796
    default:
797
    case 3:
798
        tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
799
        break;
800
    }
801
}
802

    
803
/* synchronize the guest EIP with the given translation-time pc */
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
/* Compute the source address of a string instruction into A0:
   segment base (honoring any segment override) + (R)ESI, with the
   addressing-size wrap selected by s->aflag. */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64-bit address: segment bases are ignored unless overridden */
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}

/* Compute the destination address of a string instruction into A0:
   ES:(R)EDI — the destination segment cannot be overridden. */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
866
    gen_op_movl_T0_Dshiftb,
867
    gen_op_movl_T0_Dshiftw,
868
    gen_op_movl_T0_Dshiftl,
869
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
870
};
871

    
872
static GenOpFunc1 *gen_op_jnz_ecx[3] = {
873
    gen_op_jnz_ecxw,
874
    gen_op_jnz_ecxl,
875
    X86_64_ONLY(gen_op_jnz_ecxq),
876
};
877

    
878
static GenOpFunc1 *gen_op_jz_ecx[3] = {
879
    gen_op_jz_ecxw,
880
    gen_op_jz_ecxl,
881
    X86_64_ONLY(gen_op_jz_ecxq),
882
};
883

    
884
static GenOpFunc *gen_op_dec_ECX[3] = {
885
    gen_op_decw_ECX,
886
    gen_op_decl_ECX,
887
    X86_64_ONLY(gen_op_decq_ECX),
888
};
889

    
890
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
891
    {
892
        gen_op_jnz_subb,
893
        gen_op_jnz_subw,
894
        gen_op_jnz_subl,
895
        X86_64_ONLY(gen_op_jnz_subq),
896
    },
897
    {
898
        gen_op_jz_subb,
899
        gen_op_jz_subw,
900
        gen_op_jz_subl,
901
        X86_64_ONLY(gen_op_jz_subq),
902
    },
903
};
904

    
905
static GenOpFunc *gen_op_in_DX_T0[3] = {
906
    gen_op_inb_DX_T0,
907
    gen_op_inw_DX_T0,
908
    gen_op_inl_DX_T0,
909
};
910

    
911
static GenOpFunc *gen_op_out_DX_T0[3] = {
912
    gen_op_outb_DX_T0,
913
    gen_op_outw_DX_T0,
914
    gen_op_outl_DX_T0,
915
};
916

    
917
static GenOpFunc *gen_op_in[3] = {
918
    gen_op_inb_T0_T1,
919
    gen_op_inw_T0_T1,
920
    gen_op_inl_T0_T1,
921
};
922

    
923
static GenOpFunc *gen_op_out[3] = {
924
    gen_op_outb_T0_T1,
925
    gen_op_outw_T0_T1,
926
    gen_op_outl_T0_T1,
927
};
928

    
929
static GenOpFunc *gen_check_io_T0[3] = {
930
    gen_op_check_iob_T0,
931
    gen_op_check_iow_T0,
932
    gen_op_check_iol_T0,
933
};
934

    
935
static GenOpFunc *gen_check_io_DX[3] = {
936
    gen_op_check_iob_DX,
937
    gen_op_check_iow_DX,
938
    gen_op_check_iol_DX,
939
};
940

    
941
/* Emit an I/O permission check when required (protected mode with
   CPL > IOPL, or vm86 mode). The cc state and EIP are synchronized first
   because the check helper may raise an exception. */
static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
{
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        if (use_dx)
            gen_check_io_DX[ot]();
        else
            gen_check_io_T0[ot]();
    }
}
static inline void gen_movs(DisasContext *s, int ot)
955
{
956
    gen_string_movl_A0_ESI(s);
957
    gen_op_ld_T0_A0(ot + s->mem_index);
958
    gen_string_movl_A0_EDI(s);
959
    gen_op_st_T0_A0(ot + s->mem_index);
960
    gen_op_movl_T0_Dshift[ot]();
961
#ifdef TARGET_X86_64
962
    if (s->aflag == 2) {
963
        gen_op_addq_ESI_T0();
964
        gen_op_addq_EDI_T0();
965
    } else
966
#endif
967
    if (s->aflag) {
968
        gen_op_addl_ESI_T0();
969
        gen_op_addl_EDI_T0();
970
    } else {
971
        gen_op_addw_ESI_T0();
972
        gen_op_addw_EDI_T0();
973
    }
974
}
975

    
976
static inline void gen_update_cc_op(DisasContext *s)
977
{
978
    if (s->cc_op != CC_OP_DYNAMIC) {
979
        gen_op_set_cc_op(s->cc_op);
980
        s->cc_op = CC_OP_DYNAMIC;
981
    }
982
}
983

    
984
/* XXX: does not work with gdbstub "ice" single step - not a
985
   serious problem */
986
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
987
{
988
    int l1, l2;
989

    
990
    l1 = gen_new_label();
991
    l2 = gen_new_label();
992
    gen_op_jnz_ecx[s->aflag](l1);
993
    gen_set_label(l2);
994
    gen_jmp_tb(s, next_eip, 1);
995
    gen_set_label(l1);
996
    return l2;
997
}
998

    
999
/* STOS: store (E)AX element to [ES:(R)EDI], advance EDI */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}

/* LODS: load element from [seg:(R)ESI] into (E)AX, advance ESI */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}

/* SCAS: compare (E)AX with [ES:(R)EDI], set cc, advance EDI */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}

/* CMPS: compare [seg:(R)ESI] with [ES:(R)EDI], set cc, advance both */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
/* INS: read from port DX into [ES:(R)EDI], advance EDI. The destination
   is pre-stored with 0 first so a page fault is taken before the I/O
   access is performed. */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_in_DX_T0[ot]();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}

/* OUTS: write [seg:(R)ESI] to port DX, advance ESI */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_out_DX_T0[ot]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1115
/* same method as Valgrind : we generate jumps to current or next
   instruction */
/* Expand to a gen_repz_<op>() helper wrapping one iteration of the
   string op in a REP loop: skip to next_eip when ECX is already 0,
   run one iteration, decrement ECX, then jump back to cur_eip so the
   loop re-enters through the translator (one iteration per TB). */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
1132

    
1133
/* Like GEN_REPZ, but for the flag-testing string ops (SCAS/CMPS):
   the extra 'nz' argument selects REPZ vs REPNZ termination -- after
   each iteration the generated code also exits the loop when the ZF
   test (gen_op_string_jnz_sub) fires. */
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);\
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
1150

    
1151
/* instantiate the REP helpers: plain REP for the non-flag-testing
   ops, REPZ/REPNZ (nz parameter) for SCAS and CMPS */
GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
1158

    
1159
/* condition codes for Jcc/SETcc, in x86 encoding order (bits 3:1 of
   the opcode); the low opcode bit selects the inverted sense and is
   handled separately by the callers */
enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};
1169

    
1170
/* fast conditional-jump generators usable when the flags come from a
   sub/cmp of the given operand size, indexed [OT_*][JCC_*].  NULL
   entries (JCC_O, JCC_P, and the BUGGY_64 64-bit cases) force the
   caller onto the slow generic path. */
static GenOpFunc1 *gen_jcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_jb_subb,
        gen_op_jz_subb,
        gen_op_jbe_subb,
        gen_op_js_subb,
        NULL,
        gen_op_jl_subb,
        gen_op_jle_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_jb_subw,
        gen_op_jz_subw,
        gen_op_jbe_subw,
        gen_op_js_subw,
        NULL,
        gen_op_jl_subw,
        gen_op_jle_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_jb_subl,
        gen_op_jz_subl,
        gen_op_jbe_subl,
        gen_op_js_subl,
        NULL,
        gen_op_jl_subl,
        gen_op_jle_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        BUGGY_64(gen_op_jb_subq),
        gen_op_jz_subq,
        BUGGY_64(gen_op_jbe_subq),
        gen_op_js_subq,
        NULL,
        BUGGY_64(gen_op_jl_subq),
        BUGGY_64(gen_op_jle_subq),
    },
#endif
};
1214
/* LOOPNZ / LOOPZ / LOOP generators, indexed by [address size][kind]
   where rows are 0 = 16-bit (w), 1 = 32-bit (l), 2 = 64-bit (q),
   matching the op-name suffixes.
   NOTE(review): each row initializes only 3 of the 4 columns, so
   index 3 is implicitly NULL -- confirm callers handle the fourth
   loop opcode (JECXZ) without indexing this table. */
static GenOpFunc1 *gen_op_loop[3][4] = {
    [0] = {
        gen_op_loopnzw,
        gen_op_loopzw,
        gen_op_jnz_ecxw,
    },
    [1] = {
        gen_op_loopnzl,
        gen_op_loopzl,
        gen_op_jnz_ecxl,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_loopnzq,
        gen_op_loopzq,
        gen_op_jnz_ecxq,
    },
#endif
};
1233

    
1234
/* generic SETcc generators computing T0 from the full eflags state,
   indexed by JCC_*; used when no size-specialized fast path applies */
static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};
1244

    
1245
/* fast SETcc generators usable when the flags come from a sub/cmp of
   the given operand size, indexed [OT_*][JCC_*]; NULL entries (JCC_O,
   JCC_P) fall back to gen_setcc_slow */
static GenOpFunc *gen_setcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        NULL,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        NULL,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        NULL,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        NULL,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,
    },
#endif
};
1289

    
1290
/* x87 arithmetic ops ST0 <- ST0 op FT0, indexed by the 3-bit reg
   field of the FPU opcode (add/mul/com/comp/sub/subr/div/divr);
   slots 2 and 3 both point at fcom, the pop is handled by the caller */
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
    gen_op_fadd_ST0_FT0,
    gen_op_fmul_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fsub_ST0_FT0,
    gen_op_fsubr_ST0_FT0,
    gen_op_fdiv_ST0_FT0,
    gen_op_fdivr_ST0_FT0,
};
1300

    
1301
/* NOTE the exception in "r" op ordering */
/* x87 arithmetic ops ST(n) <- ST(n) op ST0, same indexing as above;
   sub/subr and div/divr are swapped relative to the ST0_FT0 table
   (the "r" ordering exception), and the compare slots are NULL */
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
    gen_op_fadd_STN_ST0,
    gen_op_fmul_STN_ST0,
    NULL,
    NULL,
    gen_op_fsubr_STN_ST0,
    gen_op_fsub_STN_ST0,
    gen_op_fdivr_STN_ST0,
    gen_op_fdiv_STN_ST0,
};
1312

    
1313
/* if d == OR_TMP0, it means memory operand (address in A0) */
1314
static void gen_op(DisasContext *s1, int op, int ot, int d)
1315
{
1316
    GenOpFunc *gen_update_cc;
1317

    
1318
    if (d != OR_TMP0) {
1319
        gen_op_mov_TN_reg(ot, 0, d);
1320
    } else {
1321
        gen_op_ld_T0_A0(ot + s1->mem_index);
1322
    }
1323
    switch(op) {
1324
    case OP_ADCL:
1325
    case OP_SBBL:
1326
        if (s1->cc_op != CC_OP_DYNAMIC)
1327
            gen_op_set_cc_op(s1->cc_op);
1328
        if (d != OR_TMP0) {
1329
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
1330
            gen_op_mov_reg_T0(ot, d);
1331
        } else {
1332
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
1333
        }
1334
        s1->cc_op = CC_OP_DYNAMIC;
1335
        goto the_end;
1336
    case OP_ADDL:
1337
        gen_op_addl_T0_T1();
1338
        s1->cc_op = CC_OP_ADDB + ot;
1339
        gen_update_cc = gen_op_update2_cc;
1340
        break;
1341
    case OP_SUBL:
1342
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1343
        s1->cc_op = CC_OP_SUBB + ot;
1344
        gen_update_cc = gen_op_update2_cc;
1345
        break;
1346
    default:
1347
    case OP_ANDL:
1348
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1349
        s1->cc_op = CC_OP_LOGICB + ot;
1350
        gen_update_cc = gen_op_update1_cc;
1351
        break;
1352
    case OP_ORL:
1353
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1354
        s1->cc_op = CC_OP_LOGICB + ot;
1355
        gen_update_cc = gen_op_update1_cc;
1356
        break;
1357
    case OP_XORL:
1358
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1359
        s1->cc_op = CC_OP_LOGICB + ot;
1360
        gen_update_cc = gen_op_update1_cc;
1361
        break;
1362
    case OP_CMPL:
1363
        gen_op_cmpl_T0_T1_cc();
1364
        s1->cc_op = CC_OP_SUBB + ot;
1365
        gen_update_cc = NULL;
1366
        break;
1367
    }
1368
    if (op != OP_CMPL) {
1369
        if (d != OR_TMP0)
1370
            gen_op_mov_reg_T0(ot, d);
1371
        else
1372
            gen_op_st_T0_A0(ot + s1->mem_index);
1373
    }
1374
    /* the flags update must happen after the memory write (precise
1375
       exception support) */
1376
    if (gen_update_cc)
1377
        gen_update_cc();
1378
 the_end: ;
1379
}
1380

    
1381
/* if d == OR_TMP0, it means memory operand (address in A0) */
1382
static void gen_inc(DisasContext *s1, int ot, int d, int c)
1383
{
1384
    if (d != OR_TMP0)
1385
        gen_op_mov_TN_reg(ot, 0, d);
1386
    else
1387
        gen_op_ld_T0_A0(ot + s1->mem_index);
1388
    if (s1->cc_op != CC_OP_DYNAMIC)
1389
        gen_op_set_cc_op(s1->cc_op);
1390
    if (c > 0) {
1391
        gen_op_incl_T0();
1392
        s1->cc_op = CC_OP_INCB + ot;
1393
    } else {
1394
        gen_op_decl_T0();
1395
        s1->cc_op = CC_OP_DECB + ot;
1396
    }
1397
    if (d != OR_TMP0)
1398
        gen_op_mov_reg_T0(ot, d);
1399
    else
1400
        gen_op_st_T0_A0(ot + s1->mem_index);
1401
    gen_op_update_inc_cc();
1402
}
1403

    
1404
/* Generate a variable-count shift/rotate 'op' of size 'ot'.  The
   value comes from register 'd' (or memory at A0 when d == OR_TMP0);
   the count comes from register 's' unless it is already in T1
   (s == OR_TMP1). */
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    /* value operand into T0 */
    if (d == OR_TMP0)
        gen_op_ld_T0_A0(ot + s1->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, d);
    /* count operand into T1 */
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);
    /* for zero counts, flags are not updated, so must do it dynamically */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);

    if (d == OR_TMP0) {
        /* the memory helper performs the writeback itself */
        gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
    } else {
        gen_op_shift_T0_T1_cc[ot][op]();
        gen_op_mov_reg_T0(ot, d);
    }
    s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1424

    
1425
/* generate a shift/rotate with an immediate count 'c': simply load
   the count into T1 and reuse the variable-count path */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
1431

    
1432
/* Decode the memory operand of a ModR/M byte and generate code that
   leaves the effective address in A0, consuming any SIB byte and
   displacement from the instruction stream (advancing s->pc).
   Handles 16-bit, 32-bit and (under TARGET_X86_64) 64-bit address
   sizes, default segment selection (SS for EBP/ESP-based forms) and
   explicit segment overrides.  *reg_ptr/*offset_ptr are always set
   to OR_A0/0 for the caller. */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit address size */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            /* rm == 4 selects a SIB byte */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base register: 32-bit displacement only */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    /* x86-64 RIP-relative addressing */
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            /* sign-extended 8-bit displacement */
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            /* 32-bit displacement */
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0) {
                    gen_op_addq_A0_im(disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
            /* displacement-only form */
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
            /* add scaled index register */
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* default segment: SS for stack-relative bases */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        /* 16-bit address size */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* displacement-only form */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        /* the eight fixed 16-bit base/index register combinations */
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        /* wrap to 16 bits */
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* BP-based forms default to SS */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
1615

    
1616
/* Skip over the memory-operand bytes of a ModR/M form without
   generating any code: consume the SIB byte (if present) and the
   displacement, advancing s->pc exactly as gen_lea_modrm would.
   Used for multi-byte NOP-style opcodes. */
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base, sib;

    mod = (modrm >> 6) & 3;
    if (mod == 3)
        return; /* register operand: nothing to skip */
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit address size */
        base = rm;
        if (base == 4) {
            /* SIB byte present */
            sib = ldub_code(s->pc++);
            base = (sib & 7);
        }
        /* displacement: 1 byte for mod 1, 4 bytes for mod 2 and for
           the mod 0 / base 5 (no-base) form */
        if (mod == 1)
            s->pc += 1;
        else if (mod == 2 || (mod == 0 && base == 5))
            s->pc += 4;
    } else {
        /* 16-bit address size: 1 byte for mod 1, 2 bytes for mod 2
           and for the mod 0 / rm 6 displacement-only form */
        if (mod == 1)
            s->pc += 1;
        else if (mod == 2 || (mod == 0 && rm == 6))
            s->pc += 2;
    }
}
1665

    
1666
/* used for LEA and MOV AX, mem */
1667
static void gen_add_A0_ds_seg(DisasContext *s)
1668
{
1669
    int override, must_add_seg;
1670
    must_add_seg = s->addseg;
1671
    override = R_DS;
1672
    if (s->override >= 0) {
1673
        override = s->override;
1674
        must_add_seg = 1;
1675
    } else {
1676
        override = R_DS;
1677
    }
1678
    if (must_add_seg) {
1679
#ifdef TARGET_X86_64
1680
        if (CODE64(s)) {
1681
            gen_op_addq_A0_seg(override);
1682
        } else
1683
#endif
1684
        {
1685
            gen_op_addl_A0_seg(override);
1686
        }
1687
    }
1688
}
1689

    
1690
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
   OR_TMP0 */
/* For mod == 3 the operand is a register and the transfer is a
   register-to-register move through T0; otherwise the effective
   address is computed via gen_lea_modrm and a memory load/store of
   size 'ot' is generated. */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        /* register operand */
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    } else {
        /* memory operand: address into A0 first */
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_st_T0_A0(ot + s->mem_index);
        } else {
            gen_op_ld_T0_A0(ot + s->mem_index);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    }
}
1721

    
1722
/* Fetch an immediate of size 'ot' from the instruction stream and
   advance s->pc past it; sizes other than byte/word read 32 bits. */
static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t val;

    if (ot == OT_BYTE) {
        val = ldub_code(s->pc);
        s->pc += 1;
    } else if (ot == OT_WORD) {
        val = lduw_code(s->pc);
        s->pc += 2;
    } else {
        /* OT_LONG and anything else */
        val = ldl_code(s->pc);
        s->pc += 4;
    }
    return val;
}
1743

    
1744
static inline int insn_const_size(unsigned int ot)
1745
{
1746
    if (ot <= OT_LONG)
1747
        return 1 << ot;
1748
    else
1749
        return 4;
1750
}
1751

    
1752
/* Emit a jump to 'eip' through TB-chaining slot 'tb_num' when the
   target lies on one of the pages this TB already spans (so the
   direct-jump patching stays valid); otherwise store eip and end the
   block. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
1772

    
1773
/* Generate a conditional jump for condition 'b' (low bit = inverted
   sense, bits 3:1 = JCC_* index).  'val' is the taken-branch eip and
   'next_eip' the fallthrough eip.  When TB chaining is allowed
   (s->jmp_opt) both edges are emitted with gen_goto_tb; otherwise
   the chosen eip is stored and the block ended. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    TranslationBlock *tb;
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
        /* pick a fast jump generator that reads the lazily-stored
           operands directly, when the current cc_op allows it */
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            /* only ZF and SF tests read just the result value, so
               only those can reuse the sub-based generators here */
            switch(jcc_op) {
            case JCC_Z:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            /* slow path: compute the condition into T0 and branch on it */
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        if (inv) {
            /* inverted condition: swap the two targets */
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        tb = s->tb;

        l1 = gen_new_label();
        func(l1);

        /* fallthrough edge, then taken edge, each via TB chaining */
        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {
        /* no TB chaining: evaluate the condition, store the proper
           eip and end the block */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
1901

    
1902
/* Generate a SETcc: compute condition 'b' (low bit = inverted sense,
   bits 3:1 = JCC_* index) into T0 as 0/1, using a size-specialized
   fast generator when the current cc_op permits, else the slow
   eflags-based one. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        /* only ZF and SF tests depend solely on the stored result */
        switch(jcc_op) {
        case JCC_Z:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        /* generic path: flush the flag state and test eflags */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        /* inverted condition: flip the 0/1 result */
        gen_op_xor_T0_1();
    }
}
1968

    
1969
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* protected mode: the load can fault, so the flag state and
           eip must be up to date before calling the helper */
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        /* real/vm86 mode: plain selector load, no fault possible */
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
1991

    
1992
/* load a 64-bit immediate into T1; used by the SVM intercept helpers
   below to pass the exit-info value */
#define SVM_movq_T1_im(x) gen_movtl_T1_im(x)
1993

    
1994
/* Emit an SVM I/O-intercept check for an IN/OUT at pc_start when the
   guest has I/O protection intercepts enabled; 'type' encodes the
   intercept information word.  Always returns 0 (the check itself
   never ends the TB).  No-op in user-only builds. */
static inline int
gen_svm_check_io(DisasContext *s, target_ulong pc_start, uint64_t type)
{
#if !defined(CONFIG_USER_ONLY)
    if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
        /* flush lazy flags and both eip values before the helper,
           which may trigger a #VMEXIT */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        SVM_movq_T1_im(s->pc - s->cs_base);
        gen_jmp_im(pc_start - s->cs_base);
        gen_op_geneflags();
        gen_op_svm_check_intercept_io((uint32_t)(type >> 32), (uint32_t)type);
        s->cc_op = CC_OP_DYNAMIC;
        /* FIXME: maybe we could move the io intercept vector to the TB as well
                  so we know if this is an EOB or not ... let's assume it's not
                  for now. */
    }
#endif
    return 0;
}
2013

    
2014
static inline int svm_is_rep(int prefixes)
2015
{
2016
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2017
}
2018

    
2019
/* Emit an SVM intercept check for exit code 'type' with exit-info
   'param' at pc_start.  Returns 1 when the generated code
   unconditionally performs a #VMEXIT and ends the TB, 0 when the
   intercept check is conditional (or SVM is not active). */
static inline int
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint64_t type, uint64_t param)
{
    if(!(s->flags & (INTERCEPT_SVM_MASK)))
        /* no SVM activated */
        return 0;
    switch(type) {
        /* CRx and DRx reads/writes */
        case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            SVM_movq_T1_im(param);
            gen_op_geneflags();
            gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
            /* this is a special case as we do not know if the interception occurs
               so we assume there was none */
            return 0;
        case SVM_EXIT_MSR:
            if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                    s->cc_op = CC_OP_DYNAMIC;
                }
                gen_jmp_im(pc_start - s->cs_base);
                SVM_movq_T1_im(param);
                gen_op_geneflags();
                gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
                /* this is a special case as we do not know if the interception occurs
                   so we assume there was none */
                return 0;
            }
            break;
        default:
            /* all other intercepts: when the corresponding intercept
               bit is set the exit is unconditional */
            if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                    /* NOTE(review): this branch sets CC_OP_EFLAGS while
                       the cases above set CC_OP_DYNAMIC -- confirm this
                       difference is intentional (the TB ends right
                       after, so it may be harmless) */
                    s->cc_op = CC_OP_EFLAGS;
                }
                gen_jmp_im(pc_start - s->cs_base);
                SVM_movq_T1_im(param);
                gen_op_geneflags();
                gen_op_svm_vmexit(type >> 32, type);
                /* we can optimize this one so TBs don't get longer
                   than up to vmexit */
                gen_eob(s);
                return 1;
            }
    }
    return 0;
}
2073

    
2074
/* convenience wrapper: intercept check with a zero exit-info param */
static inline int
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    return gen_svm_check_intercept_param(s, pc_start, type, 0);
}
2079

    
2080
/* add 'addend' to ESP/RSP with the width implied by the current
   code/stack size (64-bit, 32-bit, or 16-bit wrap) */
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_addq_ESP_im(addend);
    } else
#endif
    if (s->ss32) {
        gen_op_addl_ESP_im(addend);
    } else {
        gen_op_addw_ESP_im(addend);
    }
}
2093

    
2094
/* generate a push. It depends on ss32, addseg and dflag */
/* Push T0 on the stack: decrement the stack pointer by the operand
   size (8/4/2 bytes per dflag and mode), store T0 at SS:[new ESP],
   then commit the new stack pointer.  The store is emitted before the
   ESP writeback so a faulting store leaves ESP unchanged. */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            /* 64-bit push */
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            /* 16-bit push in 64-bit mode (0x66 prefix) */
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                /* keep the unsegmented ESP value in T1 for writeback */
                gen_op_movl_T1_A0();
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            /* 16-bit stack: wrap the offset, then add the SS base */
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
2133

    
2134
/* generate a push. It depends on ss32, addseg and dflag */
2135
/* slower version for T1, only used for call Ev */
2136
static void gen_push_T1(DisasContext *s)
2137
{
2138
#ifdef TARGET_X86_64
2139
    if (CODE64(s)) {
2140
        gen_op_movq_A0_reg(R_ESP);
2141
        if (s->dflag) {
2142
            gen_op_addq_A0_im(-8);
2143
            gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2144
        } else {
2145
            gen_op_addq_A0_im(-2);
2146
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
2147
        }
2148
        gen_op_mov_reg_A0(2, R_ESP);
2149
    } else
2150
#endif
2151
    {
2152
        gen_op_movl_A0_reg(R_ESP);
2153
        if (!s->dflag)
2154
            gen_op_addl_A0_im(-2);
2155
        else
2156
            gen_op_addl_A0_im(-4);
2157
        if (s->ss32) {
2158
            if (s->addseg) {
2159
                gen_op_addl_A0_seg(R_SS);
2160
            }
2161
        } else {
2162
            gen_op_andl_A0_ffff();
2163
            gen_op_addl_A0_seg(R_SS);
2164
        }
2165
        gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2166

    
2167
        if (s->ss32 && !s->addseg)
2168
            gen_op_mov_reg_A0(1, R_ESP);
2169
        else
2170
            gen_stack_update(s, (-2) << s->dflag);
2171
    }
2172
}
2173

    
2174
/* two step pop is necessary for precise exceptions */
2175
static void gen_pop_T0(DisasContext *s)
2176
{
2177
#ifdef TARGET_X86_64
2178
    if (CODE64(s)) {
2179
        gen_op_movq_A0_reg(R_ESP);
2180
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2181
    } else
2182
#endif
2183
    {
2184
        gen_op_movl_A0_reg(R_ESP);
2185
        if (s->ss32) {
2186
            if (s->addseg)
2187
                gen_op_addl_A0_seg(R_SS);
2188
        } else {
2189
            gen_op_andl_A0_ffff();
2190
            gen_op_addl_A0_seg(R_SS);
2191
        }
2192
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2193
    }
2194
}
2195

    
2196
/* Second half of a pop: bump the stack pointer past the value that
   gen_pop_T0() just loaded. */
static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
        return;
    }
#endif
    gen_stack_update(s, 2 << s->dflag);
}
2207

    
2208
/* Load A0 with the (optionally segmented) current stack address and
   leave the raw, unsegmented ESP value in T1. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();   /* 16-bit stack pointer wraps */
    gen_op_movl_T1_A0();         /* T1 = unsegmented offset */
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
2217

    
2218
/* NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
{
    int i;
    /* point A0 at the final stack top: ESP minus 8 slots of 2/4 bytes */
    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-16 <<  s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();   /* 16-bit stack pointer wraps */
    gen_op_movl_T1_A0();         /* T1 = new unsegmented ESP value */
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    /* store registers 7..0 (EDI first) from the lowest address upward,
       so EAX ends up nearest the original stack top */
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
2236

    
2237
/* NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();   /* 16-bit stack pointer wraps */
    gen_op_movl_T1_A0();
    gen_op_addl_T1_im(16 <<  s->dflag);  /* T1 = ESP after all 8 pops */
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    /* load registers 7..0 (EDI first); slot i == 3 holds the saved
       ESP, which the architecture discards */
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
2258

    
2259
/* Emit code for the ENTER instruction: push EBP, optionally copy
   'level' nesting frame pointers, set the new frame pointer and
   reserve 'esp_addend' bytes of locals. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;   /* the architecture caps the nesting level at 31 */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        /* NOTE(review): uses the 32-bit A0 load even in 64-bit mode —
           confirm this is intended vs. gen_op_movq_A0_reg() */
        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();   /* T1 = address of the saved RBP slot */

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* copy the enclosing frame pointers via a helper */
            gen_op_enter64_level(level, (ot == OT_QUAD));
        }
        gen_op_mov_reg_T1(ot, R_EBP);   /* new frame pointer */
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();   /* 16-bit stack pointer wraps */
        gen_op_movl_T1_A0();         /* T1 = offset of the saved BP slot */
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* copy the enclosing frame pointers via a helper */
            gen_op_enter_level(level, s->dflag);
        }
        gen_op_mov_reg_T1(ot, R_EBP);   /* new frame pointer */
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
2306

    
2307
/* Raise exception 'trapno' at guest EIP 'cur_eip': flush the lazily
   tracked condition codes, synchronize EIP, then emit the raise op. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;   /* control leaves this translation block */
}
2315

    
2316
/* an interrupt is different from an exception because of the
   privilege checks */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);   /* flush lazy condition codes */
    gen_jmp_im(cur_eip);
    /* the second argument is the instruction length, from which the
       helper can derive the return EIP */
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3;   /* control leaves this translation block */
}
2327

    
2328
/* Synchronize EIP to 'cur_eip' and enter the debug handler. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);   /* flush lazy condition codes */
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;   /* control leaves this translation block */
}
2336

    
2337
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);   /* flush lazy condition codes */
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        /* clear the interrupt-inhibit flag carried in the TB flags */
        gen_op_reset_inhibit_irq();
    }
    /* debugger single-stepping takes priority over the guest TF trap;
       otherwise leave the TB normally */
    if (s->singlestep_enabled) {
        gen_op_debug();
    } else if (s->tf) {
        gen_op_single_step();
    } else {
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = 3;
}
2355

    
2356
/* generate a jump to eip. No segment change must happen before as a
2357
   direct call to the next block may occur */
2358
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2359
{
2360
    if (s->jmp_opt) {
2361
        if (s->cc_op != CC_OP_DYNAMIC) {
2362
            gen_op_set_cc_op(s->cc_op);
2363
            s->cc_op = CC_OP_DYNAMIC;
2364
        }
2365
        gen_goto_tb(s, tb_num, eip);
2366
        s->is_jmp = 3;
2367
    } else {
2368
        gen_jmp_im(eip);
2369
        gen_eob(s);
2370
    }
2371
}
2372

    
2373
/* Jump to 'eip' using goto_tb slot 0 (see gen_jmp_tb). */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2377

    
2378
/* 64-bit load from the address in A0 into an env slot, indexed by
   access kind (callers use s->mem_index >> 2): raw, kernel, user. */
static GenOpFunc1 *gen_ldq_env_A0[3] = {
    gen_op_ldq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldq_kernel_env_A0,
    gen_op_ldq_user_env_A0,
#endif
};
2385

    
2386
/* 64-bit store from an env slot to the address in A0, indexed by
   access kind (callers use s->mem_index >> 2): raw, kernel, user. */
static GenOpFunc1 *gen_stq_env_A0[3] = {
    gen_op_stq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_stq_kernel_env_A0,
    gen_op_stq_user_env_A0,
#endif
};
2393

    
2394
/* 128-bit (octet) load from the address in A0 into an env slot,
   indexed by access kind (s->mem_index >> 2): raw, kernel, user. */
static GenOpFunc1 *gen_ldo_env_A0[3] = {
    gen_op_ldo_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldo_kernel_env_A0,
    gen_op_ldo_user_env_A0,
#endif
};
2401

    
2402
/* 128-bit (octet) store from an env slot to the address in A0,
   indexed by access kind (s->mem_index >> 2): raw, kernel, user. */
static GenOpFunc1 *gen_sto_env_A0[3] = {
    gen_op_sto_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_sto_kernel_env_A0,
    gen_op_sto_user_env_A0,
#endif
};
2409

    
2410
/* Sentinel for sse_op_table1 entries that need bespoke decoding in
   gen_sse() instead of a plain generator call. */
#define SSE_SPECIAL ((GenOpFunc2 *)1)

/* Generator pair for an op that exists in MMX and SSE (xmm) form. */
#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
/* Four generators for a float op: packed/scalar, single/double
   (ps, pd, ss, sd), matching the prefix index used by gen_sse(). */
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
2415

    
2416
/* Dispatch table for 0x0f-prefixed SIMD opcodes, indexed first by the
   opcode byte and then by mandatory prefix: 0 = none, 1 = 0x66,
   2 = 0xf3, 3 = 0xf2 (see the b1 computation in gen_sse()).
   SSE_SPECIAL entries are decoded by hand in gen_sse(); NULL entries
   raise an illegal-opcode exception. */
static GenOpFunc2 *sse_op_table1[256][4] = {
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
    [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
    [0x2f] = { gen_op_comiss, gen_op_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
    [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
    [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
    [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
    [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
               gen_op_cvtss2sd, gen_op_cvtsd2ss },
    [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),  /* cmpps/cmppd/cmpss/cmpsd; predicate decoded via sse_op_table4 */
    [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
    [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
               (GenOpFunc2 *)gen_op_pshufd_xmm,
               (GenOpFunc2 *)gen_op_pshufhw_xmm,
               (GenOpFunc2 *)gen_op_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_SPECIAL }, /* emms */
    [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
    [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2538

    
2539
/* Immediate-shift group (opcodes 0x71/0x72/0x73 above are SSE_SPECIAL
   and presumably dispatch here) — rows of 8 for word/dword/qword
   shifts, each entry an {mmx, xmm} generator pair.
   NOTE(review): exact index computation lives in gen_sse(), outside
   this chunk — confirm against the caller. */
static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, gen_op_psrldq_xmm },  /* 128-bit byte shift: xmm only */
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, gen_op_pslldq_xmm },  /* 128-bit byte shift: xmm only */
};
2551

    
2552
/* Integer <-> scalar-float conversions, three groups of four entries:
   cvtsi2ss/sd, truncating cvttss/sd2si, rounding cvtss/sd2si — each
   group followed by its 64-bit (sq) forms, which are NULL on targets
   without TARGET_X86_64 (see X86_64_ONLY). */
static GenOpFunc1 *sse_op_table3[4 * 3] = {
    gen_op_cvtsi2ss,
    gen_op_cvtsi2sd,
    X86_64_ONLY(gen_op_cvtsq2ss),
    X86_64_ONLY(gen_op_cvtsq2sd),

    gen_op_cvttss2si,
    gen_op_cvttsd2si,
    X86_64_ONLY(gen_op_cvttss2sq),
    X86_64_ONLY(gen_op_cvttsd2sq),

    gen_op_cvtss2si,
    gen_op_cvtsd2si,
    X86_64_ONLY(gen_op_cvtss2sq),
    X86_64_ONLY(gen_op_cvtsd2sq),
};
2568

    
2569
/* Compare-predicate table for cmpps/cmppd/cmpss/cmpsd (opcode 0xc2),
   indexed by the 3-bit immediate predicate; each row expands to the
   four ps/pd/ss/sd generator variants via SSE_FOP. */
static GenOpFunc2 *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2579

    
2580
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2581
{
2582
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
2583
    int modrm, mod, rm, reg, reg_addr, offset_addr;
2584
    GenOpFunc2 *sse_op2;
2585
    GenOpFunc3 *sse_op3;
2586

    
2587
    b &= 0xff;
2588
    if (s->prefix & PREFIX_DATA)
2589
        b1 = 1;
2590
    else if (s->prefix & PREFIX_REPZ)
2591
        b1 = 2;
2592
    else if (s->prefix & PREFIX_REPNZ)
2593
        b1 = 3;
2594
    else
2595
        b1 = 0;
2596
    sse_op2 = sse_op_table1[b][b1];
2597
    if (!sse_op2)
2598
        goto illegal_op;
2599
    if (b <= 0x5f || b == 0xc6 || b == 0xc2) {
2600
        is_xmm = 1;
2601
    } else {
2602
        if (b1 == 0) {
2603
            /* MMX case */
2604
            is_xmm = 0;
2605
        } else {
2606
            is_xmm = 1;
2607
        }
2608
    }
2609
    /* simple MMX/SSE operation */
2610
    if (s->flags & HF_TS_MASK) {
2611
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2612
        return;
2613
    }
2614
    if (s->flags & HF_EM_MASK) {
2615
    illegal_op:
2616
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2617
        return;
2618
    }
2619
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2620
        goto illegal_op;
2621
    if (b == 0x77) {
2622
        /* emms */
2623
        gen_op_emms();
2624
        return;
2625
    }
2626
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2627
       the static cpu state) */
2628
    if (!is_xmm) {
2629
        gen_op_enter_mmx();
2630
    }
2631

    
2632
    modrm = ldub_code(s->pc++);
2633
    reg = ((modrm >> 3) & 7);
2634
    if (is_xmm)
2635
        reg |= rex_r;
2636
    mod = (modrm >> 6) & 3;
2637
    if (sse_op2 == SSE_SPECIAL) {
2638
        b |= (b1 << 8);
2639
        switch(b) {
2640
        case 0x0e7: /* movntq */
2641
            if (mod == 3)
2642
                goto illegal_op;
2643
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2644
            gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2645
            break;
2646
        case 0x1e7: /* movntdq */
2647
        case 0x02b: /* movntps */
2648
        case 0x12b: /* movntps */
2649
        case 0x3f0: /* lddqu */
2650
            if (mod == 3)
2651
                goto illegal_op;
2652
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2653
            gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2654
            break;
2655
        case 0x6e: /* movd mm, ea */
2656
#ifdef TARGET_X86_64
2657
            if (s->dflag == 2) {
2658
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2659
                gen_op_movq_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2660
            } else
2661
#endif
2662
            {
2663
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2664
                gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2665
            }
2666
            break;
2667
        case 0x16e: /* movd xmm, ea */
2668
#ifdef TARGET_X86_64
2669
            if (s->dflag == 2) {
2670
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2671
                gen_op_movq_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2672
            } else
2673
#endif
2674
            {
2675
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2676
                gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2677
            }
2678
            break;
2679
        case 0x6f: /* movq mm, ea */
2680
            if (mod != 3) {
2681
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2682
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2683
            } else {
2684
                rm = (modrm & 7);
2685
                gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
2686
                            offsetof(CPUX86State,fpregs[rm].mmx));
2687
            }
2688
            break;
2689
        case 0x010: /* movups */
2690
        case 0x110: /* movupd */
2691
        case 0x028: /* movaps */
2692
        case 0x128: /* movapd */
2693
        case 0x16f: /* movdqa xmm, ea */
2694
        case 0x26f: /* movdqu xmm, ea */
2695
            if (mod != 3) {
2696
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2697
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2698
            } else {
2699
                rm = (modrm & 7) | REX_B(s);
2700
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2701
                            offsetof(CPUX86State,xmm_regs[rm]));
2702
            }
2703
            break;
2704
        case 0x210: /* movss xmm, ea */
2705
            if (mod != 3) {
2706
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2707
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
2708
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2709
                gen_op_movl_T0_0();
2710
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2711
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2712
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2713
            } else {
2714
                rm = (modrm & 7) | REX_B(s);
2715
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2716
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2717
            }
2718
            break;
2719
        case 0x310: /* movsd xmm, ea */
2720
            if (mod != 3) {
2721
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2722
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2723
                gen_op_movl_T0_0();
2724
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2725
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2726
            } else {
2727
                rm = (modrm & 7) | REX_B(s);
2728
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2729
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2730
            }
2731
            break;
2732
        case 0x012: /* movlps */
2733
        case 0x112: /* movlpd */
2734
            if (mod != 3) {
2735
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2736
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2737
            } else {
2738
                /* movhlps */
2739
                rm = (modrm & 7) | REX_B(s);
2740
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2741
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2742
            }
2743
            break;
2744
        case 0x212: /* movsldup */
2745
            if (mod != 3) {
2746
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2747
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2748
            } else {
2749
                rm = (modrm & 7) | REX_B(s);
2750
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2751
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2752
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2753
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
2754
            }
2755
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2756
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2757
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2758
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2759
            break;
2760
        case 0x312: /* movddup */
2761
            if (mod != 3) {
2762
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2763
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2764
            } else {
2765
                rm = (modrm & 7) | REX_B(s);
2766
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2767
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2768
            }
2769
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2770
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2771
            break;
2772
        case 0x016: /* movhps */
2773
        case 0x116: /* movhpd */
2774
            if (mod != 3) {
2775
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2776
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2777
            } else {
2778
                /* movlhps */
2779
                rm = (modrm & 7) | REX_B(s);
2780
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2781
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2782
            }
2783
            break;
2784
        case 0x216: /* movshdup */
2785
            if (mod != 3) {
2786
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2787
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2788
            } else {
2789
                rm = (modrm & 7) | REX_B(s);
2790
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2791
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
2792
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2793
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
2794
            }
2795
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2796
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2797
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2798
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2799
            break;
2800
        case 0x7e: /* movd ea, mm */
2801
#ifdef TARGET_X86_64
2802
            if (s->dflag == 2) {
2803
                gen_op_movq_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2804
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2805
            } else
2806
#endif
2807
            {
2808
                gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2809
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2810
            }
2811
            break;
2812
        case 0x17e: /* movd ea, xmm */
2813
#ifdef TARGET_X86_64
2814
            if (s->dflag == 2) {
2815
                gen_op_movq_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2816
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2817
            } else
2818
#endif
2819
            {
2820
                gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2821
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2822
            }
2823
            break;
2824
        case 0x27e: /* movq xmm, ea */
2825
            if (mod != 3) {
2826
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2827
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2828
            } else {
2829
                rm = (modrm & 7) | REX_B(s);
2830
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2831
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2832
            }
2833
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2834
            break;
2835
        case 0x7f: /* movq ea, mm */
2836
            if (mod != 3) {
2837
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2838
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2839
            } else {
2840
                rm = (modrm & 7);
2841
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2842
                            offsetof(CPUX86State,fpregs[reg].mmx));
2843
            }
2844
            break;
2845
        case 0x011: /* movups */
2846
        case 0x111: /* movupd */
2847
        case 0x029: /* movaps */
2848
        case 0x129: /* movapd */
2849
        case 0x17f: /* movdqa ea, xmm */
2850
        case 0x27f: /* movdqu ea, xmm */
2851
            if (mod != 3) {
2852
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2853
                gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2854
            } else {
2855
                rm = (modrm & 7) | REX_B(s);
2856
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
2857
                            offsetof(CPUX86State,xmm_regs[reg]));
2858
            }
2859
            break;
2860
        case 0x211: /* movss ea, xmm */
2861
            if (mod != 3) {
2862
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2863
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2864
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
2865
            } else {
2866
                rm = (modrm & 7) | REX_B(s);
2867
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
2868
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2869
            }
2870
            break;
2871
        case 0x311: /* movsd ea, xmm */
2872
            if (mod != 3) {
2873
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2874
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2875
            } else {
2876
                rm = (modrm & 7) | REX_B(s);
2877
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2878
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2879
            }
2880
            break;
2881
        case 0x013: /* movlps */
2882
        case 0x113: /* movlpd */
2883
            if (mod != 3) {
2884
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2885
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2886
            } else {
2887
                goto illegal_op;
2888
            }
2889
            break;
2890
        case 0x017: /* movhps */
2891
        case 0x117: /* movhpd */
2892
            if (mod != 3) {
2893
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2894
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2895
            } else {
2896
                goto illegal_op;
2897
            }
2898
            break;
2899
        case 0x71: /* shift mm, im */
2900
        case 0x72:
2901
        case 0x73:
2902
        case 0x171: /* shift xmm, im */
2903
        case 0x172:
2904
        case 0x173:
2905
            val = ldub_code(s->pc++);
2906
            if (is_xmm) {
2907
                gen_op_movl_T0_im(val);
2908
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2909
                gen_op_movl_T0_0();
2910
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
2911
                op1_offset = offsetof(CPUX86State,xmm_t0);
2912
            } else {
2913
                gen_op_movl_T0_im(val);
2914
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
2915
                gen_op_movl_T0_0();
2916
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
2917
                op1_offset = offsetof(CPUX86State,mmx_t0);
2918
            }
2919
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
2920
            if (!sse_op2)
2921
                goto illegal_op;
2922
            if (is_xmm) {
2923
                rm = (modrm & 7) | REX_B(s);
2924
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2925
            } else {
2926
                rm = (modrm & 7);
2927
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2928
            }
2929
            sse_op2(op2_offset, op1_offset);
2930
            break;
2931
        case 0x050: /* movmskps */
2932
            rm = (modrm & 7) | REX_B(s);
2933
            gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
2934
            gen_op_mov_reg_T0(OT_LONG, reg);
2935
            break;
2936
        case 0x150: /* movmskpd */
2937
            rm = (modrm & 7) | REX_B(s);
2938
            gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
2939
            gen_op_mov_reg_T0(OT_LONG, reg);
2940
            break;
2941
        case 0x02a: /* cvtpi2ps */
2942
        case 0x12a: /* cvtpi2pd */
2943
            gen_op_enter_mmx();
2944
            if (mod != 3) {
2945
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2946
                op2_offset = offsetof(CPUX86State,mmx_t0);
2947
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
2948
            } else {
2949
                rm = (modrm & 7);
2950
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2951
            }
2952
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2953
            switch(b >> 8) {
2954
            case 0x0:
2955
                gen_op_cvtpi2ps(op1_offset, op2_offset);
2956
                break;
2957
            default:
2958
            case 0x1:
2959
                gen_op_cvtpi2pd(op1_offset, op2_offset);
2960
                break;
2961
            }
2962
            break;
2963
        case 0x22a: /* cvtsi2ss */
2964
        case 0x32a: /* cvtsi2sd */
2965
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
2966
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
2967
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2968
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
2969
            break;
2970
        case 0x02c: /* cvttps2pi */
2971
        case 0x12c: /* cvttpd2pi */
2972
        case 0x02d: /* cvtps2pi */
2973
        case 0x12d: /* cvtpd2pi */
2974
            gen_op_enter_mmx();
2975
            if (mod != 3) {
2976
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2977
                op2_offset = offsetof(CPUX86State,xmm_t0);
2978
                gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
2979
            } else {
2980
                rm = (modrm & 7) | REX_B(s);
2981
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2982
            }
2983
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
2984
            switch(b) {
2985
            case 0x02c:
2986
                gen_op_cvttps2pi(op1_offset, op2_offset);
2987
                break;
2988
            case 0x12c:
2989
                gen_op_cvttpd2pi(op1_offset, op2_offset);
2990
                break;
2991
            case 0x02d:
2992
                gen_op_cvtps2pi(op1_offset, op2_offset);
2993
                break;
2994
            case 0x12d:
2995
                gen_op_cvtpd2pi(op1_offset, op2_offset);
2996
                break;
2997
            }
2998
            break;
2999
        case 0x22c: /* cvttss2si */
3000
        case 0x32c: /* cvttsd2si */
3001
        case 0x22d: /* cvtss2si */
3002
        case 0x32d: /* cvtsd2si */
3003
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3004
            if (mod != 3) {
3005
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3006
                if ((b >> 8) & 1) {
3007
                    gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3008
                } else {
3009
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3010
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3011
                }
3012
                op2_offset = offsetof(CPUX86State,xmm_t0);
3013
            } else {
3014
                rm = (modrm & 7) | REX_B(s);
3015
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3016
            }
3017
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3018
                          (b & 1) * 4](op2_offset);
3019
            gen_op_mov_reg_T0(ot, reg);
3020
            break;
3021
        case 0xc4: /* pinsrw */
3022
        case 0x1c4:
3023
            s->rip_offset = 1;
3024
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3025
            val = ldub_code(s->pc++);
3026
            if (b1) {
3027
                val &= 7;
3028
                gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
3029
            } else {
3030
                val &= 3;
3031
                gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
3032
            }
3033
            break;
3034
        case 0xc5: /* pextrw */
3035
        case 0x1c5:
3036
            if (mod != 3)
3037
                goto illegal_op;
3038
            val = ldub_code(s->pc++);
3039
            if (b1) {
3040
                val &= 7;
3041
                rm = (modrm & 7) | REX_B(s);
3042
                gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
3043
            } else {
3044
                val &= 3;
3045
                rm = (modrm & 7);
3046
                gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
3047
            }
3048
            reg = ((modrm >> 3) & 7) | rex_r;
3049
            gen_op_mov_reg_T0(OT_LONG, reg);
3050
            break;
3051
        case 0x1d6: /* movq ea, xmm */
3052
            if (mod != 3) {
3053
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3054
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3055
            } else {
3056
                rm = (modrm & 7) | REX_B(s);
3057
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3058
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3059
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3060
            }
3061
            break;
3062
        case 0x2d6: /* movq2dq */
3063
            gen_op_enter_mmx();
3064
            rm = (modrm & 7);
3065
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3066
                        offsetof(CPUX86State,fpregs[rm].mmx));
3067
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3068
            break;
3069
        case 0x3d6: /* movdq2q */
3070
            gen_op_enter_mmx();
3071
            rm = (modrm & 7) | REX_B(s);
3072
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3073
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3074
            break;
3075
        case 0xd7: /* pmovmskb */
3076
        case 0x1d7:
3077
            if (mod != 3)
3078
                goto illegal_op;
3079
            if (b1) {
3080
                rm = (modrm & 7) | REX_B(s);
3081
                gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
3082
            } else {
3083
                rm = (modrm & 7);
3084
                gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
3085
            }
3086
            reg = ((modrm >> 3) & 7) | rex_r;
3087
            gen_op_mov_reg_T0(OT_LONG, reg);
3088
            break;
3089
        default:
3090
            goto illegal_op;
3091
        }
3092
    } else {
3093
        /* generic MMX or SSE operation */
3094
        switch(b) {
3095
        case 0xf7:
3096
            /* maskmov : we must prepare A0 */
3097
            if (mod != 3)
3098
                goto illegal_op;
3099
#ifdef TARGET_X86_64
3100
            if (s->aflag == 2) {
3101
                gen_op_movq_A0_reg(R_EDI);
3102
            } else
3103
#endif
3104
            {
3105
                gen_op_movl_A0_reg(R_EDI);
3106
                if (s->aflag == 0)
3107
                    gen_op_andl_A0_ffff();
3108
            }
3109
            gen_add_A0_ds_seg(s);
3110
            break;
3111
        case 0x70: /* pshufx insn */
3112
        case 0xc6: /* pshufx insn */
3113
        case 0xc2: /* compare insns */
3114
            s->rip_offset = 1;
3115
            break;
3116
        default:
3117
            break;
3118
        }
3119
        if (is_xmm) {
3120
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3121
            if (mod != 3) {
3122
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3123
                op2_offset = offsetof(CPUX86State,xmm_t0);
3124
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3125
                                b == 0xc2)) {
3126
                    /* specific case for SSE single instructions */
3127
                    if (b1 == 2) {
3128
                        /* 32 bit access */
3129
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3130
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3131
                    } else {
3132
                        /* 64 bit access */
3133
                        gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3134
                    }
3135
                } else {
3136
                    gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
3137
                }
3138
            } else {
3139
                rm = (modrm & 7) | REX_B(s);
3140
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3141
            }
3142
        } else {
3143
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3144
            if (mod != 3) {
3145
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3146
                op2_offset = offsetof(CPUX86State,mmx_t0);
3147
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
3148
            } else {
3149
                rm = (modrm & 7);
3150
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3151
            }
3152
        }
3153
        switch(b) {
3154
        case 0x70: /* pshufx insn */
3155
        case 0xc6: /* pshufx insn */
3156
            val = ldub_code(s->pc++);
3157
            sse_op3 = (GenOpFunc3 *)sse_op2;
3158
            sse_op3(op1_offset, op2_offset, val);
3159
            break;
3160
        case 0xc2:
3161
            /* compare insns */
3162
            val = ldub_code(s->pc++);
3163
            if (val >= 8)
3164
                goto illegal_op;
3165
            sse_op2 = sse_op_table4[val][b1];
3166
            sse_op2(op1_offset, op2_offset);
3167
            break;
3168
        default:
3169
            sse_op2(op1_offset, op2_offset);
3170
            break;
3171
        }
3172
        if (b == 0x2e || b == 0x2f) {
3173
            s->cc_op = CC_OP_EFLAGS;
3174
        }
3175
    }
3176
}
3177

    
3178

    
3179
/* convert one instruction. s->is_jmp is set if the translation must
3180
   be stopped. Return the next pc value */
3181
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3182
{
3183
    int b, prefixes, aflag, dflag;
3184
    int shift, ot;
3185
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3186
    target_ulong next_eip, tval;
3187
    int rex_w, rex_r;
3188

    
3189
    s->pc = pc_start;
3190
    prefixes = 0;
3191
    aflag = s->code32;
3192
    dflag = s->code32;
3193
    s->override = -1;
3194
    rex_w = -1;
3195
    rex_r = 0;
3196
#ifdef TARGET_X86_64
3197
    s->rex_x = 0;
3198
    s->rex_b = 0;
3199
    x86_64_hregs = 0;
3200
#endif
3201
    s->rip_offset = 0; /* for relative ip address */
3202
 next_byte:
3203
    b = ldub_code(s->pc);
3204
    s->pc++;
3205
    /* check prefixes */
3206
#ifdef TARGET_X86_64
3207
    if (CODE64(s)) {
3208
        switch (b) {
3209
        case 0xf3:
3210
            prefixes |= PREFIX_REPZ;
3211
            goto next_byte;
3212
        case 0xf2:
3213
            prefixes |= PREFIX_REPNZ;
3214
            goto next_byte;
3215
        case 0xf0:
3216
            prefixes |= PREFIX_LOCK;
3217
            goto next_byte;
3218
        case 0x2e:
3219
            s->override = R_CS;
3220
            goto next_byte;
3221
        case 0x36:
3222
            s->override = R_SS;
3223
            goto next_byte;
3224
        case 0x3e:
3225
            s->override = R_DS;
3226
            goto next_byte;
3227
        case 0x26:
3228
            s->override = R_ES;
3229
            goto next_byte;
3230
        case 0x64:
3231
            s->override = R_FS;
3232
            goto next_byte;
3233
        case 0x65:
3234
            s->override = R_GS;
3235
            goto next_byte;
3236
        case 0x66:
3237
            prefixes |= PREFIX_DATA;
3238
            goto next_byte;
3239
        case 0x67:
3240
            prefixes |= PREFIX_ADR;
3241
            goto next_byte;
3242
        case 0x40 ... 0x4f:
3243
            /* REX prefix */
3244
            rex_w = (b >> 3) & 1;
3245
            rex_r = (b & 0x4) << 1;
3246
            s->rex_x = (b & 0x2) << 2;
3247
            REX_B(s) = (b & 0x1) << 3;
3248
            x86_64_hregs = 1; /* select uniform byte register addressing */
3249
            goto next_byte;
3250
        }
3251
        if (rex_w == 1) {
3252
            /* 0x66 is ignored if rex.w is set */
3253
            dflag = 2;
3254
        } else {
3255
            if (prefixes & PREFIX_DATA)
3256
                dflag ^= 1;
3257
        }
3258
        if (!(prefixes & PREFIX_ADR))
3259
            aflag = 2;
3260
    } else
3261
#endif
3262
    {
3263
        switch (b) {
3264
        case 0xf3:
3265
            prefixes |= PREFIX_REPZ;
3266
            goto next_byte;
3267
        case 0xf2:
3268
            prefixes |= PREFIX_REPNZ;
3269
            goto next_byte;
3270
        case 0xf0:
3271
            prefixes |= PREFIX_LOCK;
3272
            goto next_byte;
3273
        case 0x2e:
3274
            s->override = R_CS;
3275
            goto next_byte;
3276
        case 0x36:
3277
            s->override = R_SS;
3278
            goto next_byte;
3279
        case 0x3e:
3280
            s->override = R_DS;
3281
            goto next_byte;
3282
        case 0x26:
3283
            s->override = R_ES;
3284
            goto next_byte;
3285
        case 0x64:
3286
            s->override = R_FS;
3287
            goto next_byte;
3288
        case 0x65:
3289
            s->override = R_GS;
3290
            goto next_byte;
3291
        case 0x66:
3292
            prefixes |= PREFIX_DATA;
3293
            goto next_byte;
3294
        case 0x67:
3295
            prefixes |= PREFIX_ADR;
3296
            goto next_byte;
3297
        }
3298
        if (prefixes & PREFIX_DATA)
3299
            dflag ^= 1;
3300
        if (prefixes & PREFIX_ADR)
3301
            aflag ^= 1;
3302
    }
3303

    
3304
    s->prefix = prefixes;
3305
    s->aflag = aflag;
3306
    s->dflag = dflag;
3307

    
3308
    /* lock generation */
3309
    if (prefixes & PREFIX_LOCK)
3310
        gen_op_lock();
3311

    
3312
    /* now check op code */
3313
 reswitch:
3314
    switch(b) {
3315
    case 0x0f:
3316
        /**************************/
3317
        /* extended op code */
3318
        b = ldub_code(s->pc++) | 0x100;
3319
        goto reswitch;
3320

    
3321
        /**************************/
3322
        /* arith & logic */
3323
    case 0x00 ... 0x05:
3324
    case 0x08 ... 0x0d:
3325
    case 0x10 ... 0x15:
3326
    case 0x18 ... 0x1d:
3327
    case 0x20 ... 0x25:
3328
    case 0x28 ... 0x2d:
3329
    case 0x30 ... 0x35:
3330
    case 0x38 ... 0x3d:
3331
        {
3332
            int op, f, val;
3333
            op = (b >> 3) & 7;
3334
            f = (b >> 1) & 3;
3335

    
3336
            if ((b & 1) == 0)
3337
                ot = OT_BYTE;
3338
            else
3339
                ot = dflag + OT_WORD;
3340

    
3341
            switch(f) {
3342
            case 0: /* OP Ev, Gv */
3343
                modrm = ldub_code(s->pc++);
3344
                reg = ((modrm >> 3) & 7) | rex_r;
3345
                mod = (modrm >> 6) & 3;
3346
                rm = (modrm & 7) | REX_B(s);
3347
                if (mod != 3) {
3348
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3349
                    opreg = OR_TMP0;
3350
                } else if (op == OP_XORL && rm == reg) {
3351
                xor_zero:
3352
                    /* xor reg, reg optimisation */
3353
                    gen_op_movl_T0_0();
3354
                    s->cc_op = CC_OP_LOGICB + ot;
3355
                    gen_op_mov_reg_T0(ot, reg);
3356
                    gen_op_update1_cc();
3357
                    break;
3358
                } else {
3359
                    opreg = rm;
3360
                }
3361
                gen_op_mov_TN_reg(ot, 1, reg);
3362
                gen_op(s, op, ot, opreg);
3363
                break;
3364
            case 1: /* OP Gv, Ev */
3365
                modrm = ldub_code(s->pc++);
3366
                mod = (modrm >> 6) & 3;
3367
                reg = ((modrm >> 3) & 7) | rex_r;
3368
                rm = (modrm & 7) | REX_B(s);
3369
                if (mod != 3) {
3370
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3371
                    gen_op_ld_T1_A0(ot + s->mem_index);
3372
                } else if (op == OP_XORL && rm == reg) {
3373
                    goto xor_zero;
3374
                } else {
3375
                    gen_op_mov_TN_reg(ot, 1, rm);
3376
                }
3377
                gen_op(s, op, ot, reg);
3378
                break;
3379
            case 2: /* OP A, Iv */
3380
                val = insn_get(s, ot);
3381
                gen_op_movl_T1_im(val);
3382
                gen_op(s, op, ot, OR_EAX);
3383
                break;
3384
            }
3385
        }
3386
        break;
3387

    
3388
    case 0x80: /* GRP1 */
3389
    case 0x81:
3390
    case 0x82:
3391
    case 0x83:
3392
        {
3393
            int val;
3394

    
3395
            if ((b & 1) == 0)
3396
                ot = OT_BYTE;
3397
            else
3398
                ot = dflag + OT_WORD;
3399

    
3400
            modrm = ldub_code(s->pc++);
3401
            mod = (modrm >> 6) & 3;
3402
            rm = (modrm & 7) | REX_B(s);
3403
            op = (modrm >> 3) & 7;
3404

    
3405
            if (mod != 3) {
3406
                if (b == 0x83)
3407
                    s->rip_offset = 1;
3408
                else
3409
                    s->rip_offset = insn_const_size(ot);
3410
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3411
                opreg = OR_TMP0;
3412
            } else {
3413
                opreg = rm;
3414
            }
3415

    
3416
            switch(b) {
3417
            default:
3418
            case 0x80:
3419
            case 0x81:
3420
            case 0x82:
3421
                val = insn_get(s, ot);
3422
                break;
3423
            case 0x83:
3424
                val = (int8_t)insn_get(s, OT_BYTE);
3425
                break;
3426
            }
3427
            gen_op_movl_T1_im(val);
3428
            gen_op(s, op, ot, opreg);
3429
        }
3430
        break;
3431

    
3432
        /**************************/
3433
        /* inc, dec, and other misc arith */
3434
    case 0x40 ... 0x47: /* inc Gv */
3435
        ot = dflag ? OT_LONG : OT_WORD;
3436
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
3437
        break;
3438
    case 0x48 ... 0x4f: /* dec Gv */
3439
        ot = dflag ? OT_LONG : OT_WORD;
3440
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
3441
        break;
3442
    case 0xf6: /* GRP3 */
3443
    case 0xf7:
3444
        if ((b & 1) == 0)
3445
            ot = OT_BYTE;
3446
        else
3447
            ot = dflag + OT_WORD;
3448

    
3449
        modrm = ldub_code(s->pc++);
3450
        mod = (modrm >> 6) & 3;
3451
        rm = (modrm & 7) | REX_B(s);
3452
        op = (modrm >> 3) & 7;
3453
        if (mod != 3) {
3454
            if (op == 0)
3455
                s->rip_offset = insn_const_size(ot);
3456
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3457
            gen_op_ld_T0_A0(ot + s->mem_index);
3458
        } else {
3459
            gen_op_mov_TN_reg(ot, 0, rm);
3460
        }
3461

    
3462
        switch(op) {
3463
        case 0: /* test */
3464
            val = insn_get(s, ot);
3465
            gen_op_movl_T1_im(val);
3466
            gen_op_testl_T0_T1_cc();
3467
            s->cc_op = CC_OP_LOGICB + ot;
3468
            break;
3469
        case 2: /* not */
3470
            gen_op_notl_T0();
3471
            if (mod != 3) {
3472
                gen_op_st_T0_A0(ot + s->mem_index);
3473
            } else {
3474
                gen_op_mov_reg_T0(ot, rm);
3475
            }
3476
            break;
3477
        case 3: /* neg */
3478
            gen_op_negl_T0();
3479
            if (mod != 3) {
3480
                gen_op_st_T0_A0(ot + s->mem_index);
3481
            } else {
3482
                gen_op_mov_reg_T0(ot, rm);
3483
            }
3484
            gen_op_update_neg_cc();
3485
            s->cc_op = CC_OP_SUBB + ot;
3486
            break;
3487
        case 4: /* mul */
3488
            switch(ot) {
3489
            case OT_BYTE:
3490
                gen_op_mulb_AL_T0();
3491
                s->cc_op = CC_OP_MULB;
3492
                break;
3493
            case OT_WORD:
3494
                gen_op_mulw_AX_T0();
3495
                s->cc_op = CC_OP_MULW;
3496
                break;
3497
            default:
3498
            case OT_LONG:
3499
                gen_op_mull_EAX_T0();
3500
                s->cc_op = CC_OP_MULL;
3501
                break;
3502
#ifdef TARGET_X86_64
3503
            case OT_QUAD:
3504
                gen_op_mulq_EAX_T0();
3505
                s->cc_op = CC_OP_MULQ;
3506
                break;
3507
#endif
3508
            }
3509
            break;
3510
        case 5: /* imul */
3511
            switch(ot) {
3512
            case OT_BYTE:
3513
                gen_op_imulb_AL_T0();
3514
                s->cc_op = CC_OP_MULB;
3515
                break;
3516
            case OT_WORD:
3517
                gen_op_imulw_AX_T0();
3518
                s->cc_op = CC_OP_MULW;
3519
                break;
3520
            default:
3521
            case OT_LONG:
3522
                gen_op_imull_EAX_T0();
3523
                s->cc_op = CC_OP_MULL;
3524
                break;
3525
#ifdef TARGET_X86_64
3526
            case OT_QUAD:
3527
                gen_op_imulq_EAX_T0();
3528
                s->cc_op = CC_OP_MULQ;
3529
                break;
3530
#endif
3531
            }
3532
            break;
3533
        case 6: /* div */
3534
            switch(ot) {
3535
            case OT_BYTE:
3536
                gen_jmp_im(pc_start - s->cs_base);
3537
                gen_op_divb_AL_T0();
3538
                break;
3539
            case OT_WORD:
3540
                gen_jmp_im(pc_start - s->cs_base);
3541
                gen_op_divw_AX_T0();
3542
                break;
3543
            default:
3544
            case OT_LONG:
3545
                gen_jmp_im(pc_start - s->cs_base);
3546
#ifdef MACRO_TEST
3547
                /* XXX: this is just a test */
3548
                tcg_gen_macro_2(cpu_T[0], cpu_T[0], MACRO_TEST);
3549
#else
3550
                tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
3551
#endif
3552
                break;
3553
#ifdef TARGET_X86_64
3554
            case OT_QUAD:
3555
                gen_jmp_im(pc_start - s->cs_base);
3556
                gen_op_divq_EAX_T0();
3557
                break;
3558
#endif
3559
            }
3560
            break;
3561
        case 7: /* idiv */
3562
            switch(ot) {
3563
            case OT_BYTE:
3564
                gen_jmp_im(pc_start - s->cs_base);
3565
                gen_op_idivb_AL_T0();
3566
                break;
3567
            case OT_WORD:
3568
                gen_jmp_im(pc_start - s->cs_base);
3569
                gen_op_idivw_AX_T0();
3570
                break;
3571
            default:
3572
            case OT_LONG:
3573
                gen_jmp_im(pc_start - s->cs_base);
3574
                tcg_gen_helper_0_1(helper_idivl_EAX_T0, cpu_T[0]);
3575
                break;
3576
#ifdef TARGET_X86_64
3577
            case OT_QUAD:
3578
                gen_jmp_im(pc_start - s->cs_base);
3579
                gen_op_idivq_EAX_T0();
3580
                break;
3581
#endif
3582
            }
3583
            break;
3584
        default:
3585
            goto illegal_op;
3586
        }
3587
        break;
3588

    
3589
    case 0xfe: /* GRP4 */
3590
    case 0xff: /* GRP5 */
3591
        if ((b & 1) == 0)
3592
            ot = OT_BYTE;
3593
        else
3594
            ot = dflag + OT_WORD;
3595

    
3596
        modrm = ldub_code(s->pc++);
3597
        mod = (modrm >> 6) & 3;
3598
        rm = (modrm & 7) | REX_B(s);
3599
        op = (modrm >> 3) & 7;
3600
        if (op >= 2 && b == 0xfe) {
3601
            goto illegal_op;
3602
        }
3603
        if (CODE64(s)) {
3604
            if (op == 2 || op == 4) {
3605
                /* operand size for jumps is 64 bit */
3606
                ot = OT_QUAD;
3607
            } else if (op == 3 || op == 5) {
3608
                /* for call calls, the operand is 16 or 32 bit, even
3609
                   in long mode */
3610
                ot = dflag ? OT_LONG : OT_WORD;
3611
            } else if (op == 6) {
3612
                /* default push size is 64 bit */
3613
                ot = dflag ? OT_QUAD : OT_WORD;
3614
            }
3615
        }
3616
        if (mod != 3) {
3617
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3618
            if (op >= 2 && op != 3 && op != 5)
3619
                gen_op_ld_T0_A0(ot + s->mem_index);
3620
        } else {
3621
            gen_op_mov_TN_reg(ot, 0, rm);
3622
        }
3623

    
3624
        switch(op) {
3625
        case 0: /* inc Ev */
3626
            if (mod != 3)
3627
                opreg = OR_TMP0;
3628
            else
3629
                opreg = rm;
3630
            gen_inc(s, ot, opreg, 1);
3631
            break;
3632
        case 1: /* dec Ev */
3633
            if (mod != 3)
3634
                opreg = OR_TMP0;
3635
            else
3636
                opreg = rm;
3637
            gen_inc(s, ot, opreg, -1);
3638
            break;
3639
        case 2: /* call Ev */
3640
            /* XXX: optimize if memory (no 'and' is necessary) */
3641
            if (s->dflag == 0)
3642
                gen_op_andl_T0_ffff();
3643
            next_eip = s->pc - s->cs_base;
3644
            gen_movtl_T1_im(next_eip);
3645
            gen_push_T1(s);
3646
            gen_op_jmp_T0();
3647
            gen_eob(s);
3648
            break;
3649
        case 3: /* lcall Ev */
3650
            gen_op_ld_T1_A0(ot + s->mem_index);
3651
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3652
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3653
        do_lcall:
3654
            if (s->pe && !s->vm86) {
3655
                if (s->cc_op != CC_OP_DYNAMIC)
3656
                    gen_op_set_cc_op(s->cc_op);
3657
                gen_jmp_im(pc_start - s->cs_base);
3658
                gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3659
            } else {
3660
                gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3661
            }
3662
            gen_eob(s);
3663
            break;
3664
        case 4: /* jmp Ev */
3665
            if (s->dflag == 0)
3666
                gen_op_andl_T0_ffff();
3667
            gen_op_jmp_T0();
3668
            gen_eob(s);
3669
            break;
3670
        case 5: /* ljmp Ev */
3671
            gen_op_ld_T1_A0(ot + s->mem_index);
3672
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3673
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3674
        do_ljmp:
3675
            if (s->pe && !s->vm86) {
3676
                if (s->cc_op != CC_OP_DYNAMIC)
3677
                    gen_op_set_cc_op(s->cc_op);
3678
                gen_jmp_im(pc_start - s->cs_base);
3679
                gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3680
            } else {
3681
                gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3682
                gen_op_movl_T0_T1();
3683
                gen_op_jmp_T0();
3684
            }
3685
            gen_eob(s);
3686
            break;
3687
        case 6: /* push Ev */
3688
            gen_push_T0(s);
3689
            break;
3690
        default:
3691
            goto illegal_op;
3692
        }
3693
        break;
3694

    
3695
    case 0x84: /* test Ev, Gv */
3696
    case 0x85:
3697
        if ((b & 1) == 0)
3698
            ot = OT_BYTE;
3699
        else
3700
            ot = dflag + OT_WORD;
3701

    
3702
        modrm = ldub_code(s->pc++);
3703
        mod = (modrm >> 6) & 3;
3704
        rm = (modrm & 7) | REX_B(s);
3705
        reg = ((modrm >> 3) & 7) | rex_r;
3706

    
3707
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3708
        gen_op_mov_TN_reg(ot, 1, reg);
3709
        gen_op_testl_T0_T1_cc();
3710
        s->cc_op = CC_OP_LOGICB + ot;
3711
        break;
3712

    
3713
    case 0xa8: /* test eAX, Iv */
3714
    case 0xa9:
3715
        if ((b & 1) == 0)
3716
            ot = OT_BYTE;
3717
        else
3718
            ot = dflag + OT_WORD;
3719
        val = insn_get(s, ot);
3720

    
3721
        gen_op_mov_TN_reg(ot, 0, OR_EAX);
3722
        gen_op_movl_T1_im(val);
3723
        gen_op_testl_T0_T1_cc();
3724
        s->cc_op = CC_OP_LOGICB + ot;
3725
        break;
3726

    
3727
    case 0x98: /* CWDE/CBW */
3728
#ifdef TARGET_X86_64
3729
        if (dflag == 2) {
3730
            gen_op_movslq_RAX_EAX();
3731
        } else
3732
#endif
3733
        if (dflag == 1)
3734
            gen_op_movswl_EAX_AX();
3735
        else
3736
            gen_op_movsbw_AX_AL();
3737
        break;
3738
    case 0x99: /* CDQ/CWD */
3739
#ifdef TARGET_X86_64
3740
        if (dflag == 2) {
3741
            gen_op_movsqo_RDX_RAX();
3742
        } else
3743
#endif
3744
        if (dflag == 1)
3745
            gen_op_movslq_EDX_EAX();
3746
        else
3747
            gen_op_movswl_DX_AX();
3748
        break;
3749
    case 0x1af: /* imul Gv, Ev */
3750
    case 0x69: /* imul Gv, Ev, I */
3751
    case 0x6b:
3752
        ot = dflag + OT_WORD;
3753
        modrm = ldub_code(s->pc++);
3754
        reg = ((modrm >> 3) & 7) | rex_r;
3755
        if (b == 0x69)
3756
            s->rip_offset = insn_const_size(ot);
3757
        else if (b == 0x6b)
3758
            s->rip_offset = 1;
3759
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3760
        if (b == 0x69) {
3761
            val = insn_get(s, ot);
3762
            gen_op_movl_T1_im(val);
3763
        } else if (b == 0x6b) {
3764
            val = (int8_t)insn_get(s, OT_BYTE);
3765
            gen_op_movl_T1_im(val);
3766
        } else {
3767
            gen_op_mov_TN_reg(ot, 1, reg);
3768
        }
3769

    
3770
#ifdef TARGET_X86_64
3771
        if (ot == OT_QUAD) {
3772
            gen_op_imulq_T0_T1();
3773
        } else
3774
#endif
3775
        if (ot == OT_LONG) {
3776
            gen_op_imull_T0_T1();
3777
        } else {
3778
            gen_op_imulw_T0_T1();
3779
        }
3780
        gen_op_mov_reg_T0(ot, reg);
3781
        s->cc_op = CC_OP_MULB + ot;
3782
        break;
3783
    case 0x1c0:
3784
    case 0x1c1: /* xadd Ev, Gv */
3785
        if ((b & 1) == 0)
3786
            ot = OT_BYTE;
3787
        else
3788
            ot = dflag + OT_WORD;
3789
        modrm = ldub_code(s->pc++);
3790
        reg = ((modrm >> 3) & 7) | rex_r;
3791
        mod = (modrm >> 6) & 3;
3792
        if (mod == 3) {
3793
            rm = (modrm & 7) | REX_B(s);
3794
            gen_op_mov_TN_reg(ot, 0, reg);
3795
            gen_op_mov_TN_reg(ot, 1, rm);
3796
            gen_op_addl_T0_T1();
3797
            gen_op_mov_reg_T1(ot, reg);
3798
            gen_op_mov_reg_T0(ot, rm);
3799
        } else {
3800
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3801
            gen_op_mov_TN_reg(ot, 0, reg);
3802
            gen_op_ld_T1_A0(ot + s->mem_index);
3803
            gen_op_addl_T0_T1();
3804
            gen_op_st_T0_A0(ot + s->mem_index);
3805
            gen_op_mov_reg_T1(ot, reg);
3806
        }
3807
        gen_op_update2_cc();
3808
        s->cc_op = CC_OP_ADDB + ot;
3809
        break;
3810
    case 0x1b0:
3811
    case 0x1b1: /* cmpxchg Ev, Gv */
3812
        if ((b & 1) == 0)
3813
            ot = OT_BYTE;
3814
        else
3815
            ot = dflag + OT_WORD;
3816
        modrm = ldub_code(s->pc++);
3817
        reg = ((modrm >> 3) & 7) | rex_r;
3818
        mod = (modrm >> 6) & 3;
3819
        gen_op_mov_TN_reg(ot, 1, reg);
3820
        if (mod == 3) {
3821
            rm = (modrm & 7) | REX_B(s);
3822
            gen_op_mov_TN_reg(ot, 0, rm);
3823
            gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3824
            gen_op_mov_reg_T0(ot, rm);
3825
        } else {
3826
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3827
            gen_op_ld_T0_A0(ot + s->mem_index);
3828
            gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3829
        }
3830
        s->cc_op = CC_OP_SUBB + ot;
3831
        break;
3832
    case 0x1c7: /* cmpxchg8b */
3833
        modrm = ldub_code(s->pc++);
3834
        mod = (modrm >> 6) & 3;
3835
        if ((mod == 3) || ((modrm & 0x38) != 0x8))
3836
            goto illegal_op;
3837
        gen_jmp_im(pc_start - s->cs_base);
3838
        if (s->cc_op != CC_OP_DYNAMIC)
3839
            gen_op_set_cc_op(s->cc_op);
3840
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3841
        gen_op_cmpxchg8b();
3842
        s->cc_op = CC_OP_EFLAGS;
3843
        break;
3844

    
3845
        /**************************/
3846
        /* push/pop */
3847
    case 0x50 ... 0x57: /* push */
3848
        gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
3849
        gen_push_T0(s);
3850
        break;
3851
    case 0x58 ... 0x5f: /* pop */
3852
        if (CODE64(s)) {
3853
            ot = dflag ? OT_QUAD : OT_WORD;
3854
        } else {
3855
            ot = dflag + OT_WORD;
3856
        }
3857
        gen_pop_T0(s);
3858
        /* NOTE: order is important for pop %sp */
3859
        gen_pop_update(s);
3860
        gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
3861
        break;
3862
    case 0x60: /* pusha */
3863
        if (CODE64(s))
3864
            goto illegal_op;
3865
        gen_pusha(s);
3866
        break;
3867
    case 0x61: /* popa */
3868
        if (CODE64(s))
3869
            goto illegal_op;
3870
        gen_popa(s);
3871
        break;
3872
    case 0x68: /* push Iv */
3873
    case 0x6a:
3874
        if (CODE64(s)) {
3875
            ot = dflag ? OT_QUAD : OT_WORD;
3876
        } else {
3877
            ot = dflag + OT_WORD;
3878
        }
3879
        if (b == 0x68)
3880
            val = insn_get(s, ot);
3881
        else
3882
            val = (int8_t)insn_get(s, OT_BYTE);
3883
        gen_op_movl_T0_im(val);
3884
        gen_push_T0(s);
3885
        break;
3886
    case 0x8f: /* pop Ev */
3887
        if (CODE64(s)) {
3888
            ot = dflag ? OT_QUAD : OT_WORD;
3889
        } else {
3890
            ot = dflag + OT_WORD;
3891
        }
3892
        modrm = ldub_code(s->pc++);
3893
        mod = (modrm >> 6) & 3;
3894
        gen_pop_T0(s);
3895
        if (mod == 3) {
3896
            /* NOTE: order is important for pop %sp */
3897
            gen_pop_update(s);
3898
            rm = (modrm & 7) | REX_B(s);
3899
            gen_op_mov_reg_T0(ot, rm);
3900
        } else {
3901
            /* NOTE: order is important too for MMU exceptions */
3902
            s->popl_esp_hack = 1 << ot;
3903
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3904
            s->popl_esp_hack = 0;
3905
            gen_pop_update(s);
3906
        }
3907
        break;
3908
    case 0xc8: /* enter */
3909
        {
3910
            int level;
3911
            val = lduw_code(s->pc);
3912
            s->pc += 2;
3913
            level = ldub_code(s->pc++);
3914
            gen_enter(s, val, level);
3915
        }
3916
        break;
3917
    case 0xc9: /* leave */
3918
        /* XXX: exception not precise (ESP is updated before potential exception) */
3919
        if (CODE64(s)) {
3920
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
3921
            gen_op_mov_reg_T0(OT_QUAD, R_ESP);
3922
        } else if (s->ss32) {
3923
            gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3924
            gen_op_mov_reg_T0(OT_LONG, R_ESP);
3925
        } else {
3926
            gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
3927
            gen_op_mov_reg_T0(OT_WORD, R_ESP);
3928
        }
3929
        gen_pop_T0(s);
3930
        if (CODE64(s)) {
3931
            ot = dflag ? OT_QUAD : OT_WORD;
3932
        } else {
3933
            ot = dflag + OT_WORD;
3934
        }
3935
        gen_op_mov_reg_T0(ot, R_EBP);
3936
        gen_pop_update(s);
3937
        break;
3938
    case 0x06: /* push es */
3939
    case 0x0e: /* push cs */
3940
    case 0x16: /* push ss */
3941
    case 0x1e: /* push ds */
3942
        if (CODE64(s))
3943
            goto illegal_op;
3944
        gen_op_movl_T0_seg(b >> 3);
3945
        gen_push_T0(s);
3946
        break;
3947
    case 0x1a0: /* push fs */
3948
    case 0x1a8: /* push gs */
3949
        gen_op_movl_T0_seg((b >> 3) & 7);
3950
        gen_push_T0(s);
3951
        break;
3952
    case 0x07: /* pop es */
3953
    case 0x17: /* pop ss */
3954
    case 0x1f: /* pop ds */
3955
        if (CODE64(s))
3956
            goto illegal_op;
3957
        reg = b >> 3;
3958
        gen_pop_T0(s);
3959
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
3960
        gen_pop_update(s);
3961
        if (reg == R_SS) {
3962
            /* if reg == SS, inhibit interrupts/trace. */
3963
            /* If several instructions disable interrupts, only the
3964
               _first_ does it */
3965
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
3966
                gen_op_set_inhibit_irq();
3967
            s->tf = 0;
3968
        }
3969
        if (s->is_jmp) {
3970
            gen_jmp_im(s->pc - s->cs_base);
3971
            gen_eob(s);
3972
        }
3973
        break;
3974
    case 0x1a1: /* pop fs */
3975
    case 0x1a9: /* pop gs */
3976
        gen_pop_T0(s);
3977
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
3978
        gen_pop_update(s);
3979
        if (s->is_jmp) {
3980
            gen_jmp_im(s->pc - s->cs_base);
3981
            gen_eob(s);
3982
        }
3983
        break;
3984

    
3985
        /**************************/
3986
        /* mov */
3987
    case 0x88:
3988
    case 0x89: /* mov Gv, Ev */
3989
        if ((b & 1) == 0)
3990
            ot = OT_BYTE;
3991
        else
3992
            ot = dflag + OT_WORD;
3993
        modrm = ldub_code(s->pc++);
3994
        reg = ((modrm >> 3) & 7) | rex_r;
3995

    
3996
        /* generate a generic store */
3997
        gen_ldst_modrm(s, modrm, ot, reg, 1);
3998
        break;
3999
    case 0xc6:
4000
    case 0xc7: /* mov Ev, Iv */
4001
        if ((b & 1) == 0)
4002
            ot = OT_BYTE;
4003
        else
4004
            ot = dflag + OT_WORD;
4005
        modrm = ldub_code(s->pc++);
4006
        mod = (modrm >> 6) & 3;
4007
        if (mod != 3) {
4008
            s->rip_offset = insn_const_size(ot);
4009
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4010
        }
4011
        val = insn_get(s, ot);
4012
        gen_op_movl_T0_im(val);
4013
        if (mod != 3)
4014
            gen_op_st_T0_A0(ot + s->mem_index);
4015
        else
4016
            gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4017
        break;
4018
    case 0x8a:
4019
    case 0x8b: /* mov Ev, Gv */
4020
        if ((b & 1) == 0)
4021
            ot = OT_BYTE;
4022
        else
4023
            ot = OT_WORD + dflag;
4024
        modrm = ldub_code(s->pc++);
4025
        reg = ((modrm >> 3) & 7) | rex_r;
4026

    
4027
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4028
        gen_op_mov_reg_T0(ot, reg);
4029
        break;
4030
    case 0x8e: /* mov seg, Gv */
4031
        modrm = ldub_code(s->pc++);
4032
        reg = (modrm >> 3) & 7;
4033
        if (reg >= 6 || reg == R_CS)
4034
            goto illegal_op;
4035
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4036
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4037
        if (reg == R_SS) {
4038
            /* if reg == SS, inhibit interrupts/trace */
4039
            /* If several instructions disable interrupts, only the
4040
               _first_ does it */
4041
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4042
                gen_op_set_inhibit_irq();
4043
            s->tf = 0;
4044
        }
4045
        if (s->is_jmp) {
4046
            gen_jmp_im(s->pc - s->cs_base);
4047
            gen_eob(s);
4048
        }
4049
        break;
4050
    case 0x8c: /* mov Gv, seg */
4051
        modrm = ldub_code(s->pc++);
4052
        reg = (modrm >> 3) & 7;
4053
        mod = (modrm >> 6) & 3;
4054
        if (reg >= 6)
4055
            goto illegal_op;
4056
        gen_op_movl_T0_seg(reg);
4057
        if (mod == 3)
4058
            ot = OT_WORD + dflag;
4059
        else
4060
            ot = OT_WORD;
4061
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4062
        break;
4063

    
4064
    case 0x1b6: /* movzbS Gv, Eb */
4065
    case 0x1b7: /* movzwS Gv, Eb */
4066
    case 0x1be: /* movsbS Gv, Eb */
4067
    case 0x1bf: /* movswS Gv, Eb */
4068
        {
4069
            int d_ot;
4070
            /* d_ot is the size of destination */
4071
            d_ot = dflag + OT_WORD;
4072
            /* ot is the size of source */
4073
            ot = (b & 1) + OT_BYTE;
4074
            modrm = ldub_code(s->pc++);
4075
            reg = ((modrm >> 3) & 7) | rex_r;
4076
            mod = (modrm >> 6) & 3;
4077
            rm = (modrm & 7) | REX_B(s);
4078

    
4079
            if (mod == 3) {
4080
                gen_op_mov_TN_reg(ot, 0, rm);
4081
                switch(ot | (b & 8)) {
4082
                case OT_BYTE:
4083
                    gen_op_movzbl_T0_T0();
4084
                    break;
4085
                case OT_BYTE | 8:
4086
                    gen_op_movsbl_T0_T0();
4087
                    break;
4088
                case OT_WORD:
4089
                    gen_op_movzwl_T0_T0();
4090
                    break;
4091
                default:
4092
                case OT_WORD | 8:
4093
                    gen_op_movswl_T0_T0();
4094
                    break;
4095
                }
4096
                gen_op_mov_reg_T0(d_ot, reg);
4097
            } else {
4098
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4099
                if (b & 8) {
4100
                    gen_op_lds_T0_A0(ot + s->mem_index);
4101
                } else {
4102
                    gen_op_ldu_T0_A0(ot + s->mem_index);
4103
                }
4104
                gen_op_mov_reg_T0(d_ot, reg);
4105
            }
4106
        }
4107
        break;
4108

    
4109
    case 0x8d: /* lea */
4110
        ot = dflag + OT_WORD;
4111
        modrm = ldub_code(s->pc++);
4112
        mod = (modrm >> 6) & 3;
4113
        if (mod == 3)
4114
            goto illegal_op;
4115
        reg = ((modrm >> 3) & 7) | rex_r;
4116
        /* we must ensure that no segment is added */
4117
        s->override = -1;
4118
        val = s->addseg;
4119
        s->addseg = 0;
4120
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4121
        s->addseg = val;
4122
        gen_op_mov_reg_A0(ot - OT_WORD, reg);
4123
        break;
4124

    
4125
    case 0xa0: /* mov EAX, Ov */
4126
    case 0xa1:
4127
    case 0xa2: /* mov Ov, EAX */
4128
    case 0xa3:
4129
        {
4130
            target_ulong offset_addr;
4131

    
4132
            if ((b & 1) == 0)
4133
                ot = OT_BYTE;
4134
            else
4135
                ot = dflag + OT_WORD;
4136
#ifdef TARGET_X86_64
4137
            if (s->aflag == 2) {
4138
                offset_addr = ldq_code(s->pc);
4139
                s->pc += 8;
4140
                gen_op_movq_A0_im(offset_addr);
4141
            } else
4142
#endif
4143
            {
4144
                if (s->aflag) {
4145
                    offset_addr = insn_get(s, OT_LONG);
4146
                } else {
4147
                    offset_addr = insn_get(s, OT_WORD);
4148
                }
4149
                gen_op_movl_A0_im(offset_addr);
4150
            }
4151
            gen_add_A0_ds_seg(s);
4152
            if ((b & 2) == 0) {
4153
                gen_op_ld_T0_A0(ot + s->mem_index);
4154
                gen_op_mov_reg_T0(ot, R_EAX);
4155
            } else {
4156
                gen_op_mov_TN_reg(ot, 0, R_EAX);
4157
                gen_op_st_T0_A0(ot + s->mem_index);
4158
            }
4159
        }
4160
        break;
4161
    case 0xd7: /* xlat */
4162
#ifdef TARGET_X86_64
4163
        if (s->aflag == 2) {
4164
            gen_op_movq_A0_reg(R_EBX);
4165
            gen_op_addq_A0_AL();
4166
        } else
4167
#endif
4168
        {
4169
            gen_op_movl_A0_reg(R_EBX);
4170
            gen_op_addl_A0_AL();
4171
            if (s->aflag == 0)
4172
                gen_op_andl_A0_ffff();
4173
        }
4174
        gen_add_A0_ds_seg(s);
4175
        gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4176
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4177
        break;
4178
    case 0xb0 ... 0xb7: /* mov R, Ib */
4179
        val = insn_get(s, OT_BYTE);
4180
        gen_op_movl_T0_im(val);
4181
        gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4182
        break;
4183
    case 0xb8 ... 0xbf: /* mov R, Iv */
4184
#ifdef TARGET_X86_64
4185
        if (dflag == 2) {
4186
            uint64_t tmp;
4187
            /* 64 bit case */
4188
            tmp = ldq_code(s->pc);
4189
            s->pc += 8;
4190
            reg = (b & 7) | REX_B(s);
4191
            gen_movtl_T0_im(tmp);
4192
            gen_op_mov_reg_T0(OT_QUAD, reg);
4193
        } else
4194
#endif
4195
        {
4196
            ot = dflag ? OT_LONG : OT_WORD;
4197
            val = insn_get(s, ot);
4198
            reg = (b & 7) | REX_B(s);
4199
            gen_op_movl_T0_im(val);
4200
            gen_op_mov_reg_T0(ot, reg);
4201
        }
4202
        break;
4203

    
4204
    case 0x91 ... 0x97: /* xchg R, EAX */
4205
        ot = dflag + OT_WORD;
4206
        reg = (b & 7) | REX_B(s);
4207
        rm = R_EAX;
4208
        goto do_xchg_reg;
4209
    case 0x86:
4210
    case 0x87: /* xchg Ev, Gv */
4211
        if ((b & 1) == 0)
4212
            ot = OT_BYTE;
4213
        else
4214
            ot = dflag + OT_WORD;
4215
        modrm = ldub_code(s->pc++);
4216
        reg = ((modrm >> 3) & 7) | rex_r;
4217
        mod = (modrm >> 6) & 3;
4218
        if (mod == 3) {
4219
            rm = (modrm & 7) | REX_B(s);
4220
        do_xchg_reg:
4221
            gen_op_mov_TN_reg(ot, 0, reg);
4222
            gen_op_mov_TN_reg(ot, 1, rm);
4223
            gen_op_mov_reg_T0(ot, rm);
4224
            gen_op_mov_reg_T1(ot, reg);
4225
        } else {
4226
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4227
            gen_op_mov_TN_reg(ot, 0, reg);
4228
            /* for xchg, lock is implicit */
4229
            if (!(prefixes & PREFIX_LOCK))
4230
                gen_op_lock();
4231
            gen_op_ld_T1_A0(ot + s->mem_index);
4232
            gen_op_st_T0_A0(ot + s->mem_index);
4233
            if (!(prefixes & PREFIX_LOCK))
4234
                gen_op_unlock();
4235
            gen_op_mov_reg_T1(ot, reg);
4236
        }
4237
        break;
4238
    case 0xc4: /* les Gv */
4239
        if (CODE64(s))
4240
            goto illegal_op;
4241
        op = R_ES;
4242
        goto do_lxx;
4243
    case 0xc5: /* lds Gv */
4244
        if (CODE64(s))
4245
            goto illegal_op;
4246
        op = R_DS;
4247
        goto do_lxx;
4248
    case 0x1b2: /* lss Gv */
4249
        op = R_SS;
4250
        goto do_lxx;
4251
    case 0x1b4: /* lfs Gv */
4252
        op = R_FS;
4253
        goto do_lxx;
4254
    case 0x1b5: /* lgs Gv */
4255
        op = R_GS;
4256
    do_lxx:
4257
        ot = dflag ? OT_LONG : OT_WORD;
4258
        modrm = ldub_code(s->pc++);
4259
        reg = ((modrm >> 3) & 7) | rex_r;
4260
        mod = (modrm >> 6) & 3;
4261
        if (mod == 3)
4262
            goto illegal_op;
4263
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4264
        gen_op_ld_T1_A0(ot + s->mem_index);
4265
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4266
        /* load the segment first to handle exceptions properly */
4267
        gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4268
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4269
        /* then put the data */
4270
        gen_op_mov_reg_T1(ot, reg);
4271
        if (s->is_jmp) {
4272
            gen_jmp_im(s->pc - s->cs_base);
4273
            gen_eob(s);
4274
        }
4275
        break;
4276

    
4277
        /************************/
4278
        /* shifts */
4279
    case 0xc0:
4280
    case 0xc1:
4281
        /* shift Ev,Ib */
4282
        shift = 2;
4283
    grp2:
4284
        {
4285
            if ((b & 1) == 0)
4286
                ot = OT_BYTE;
4287
            else
4288
                ot = dflag + OT_WORD;
4289

    
4290
            modrm = ldub_code(s->pc++);
4291
            mod = (modrm >> 6) & 3;
4292
            op = (modrm >> 3) & 7;
4293

    
4294
            if (mod != 3) {
4295
                if (shift == 2) {
4296
                    s->rip_offset = 1;
4297
                }
4298
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4299
                opreg = OR_TMP0;
4300
            } else {
4301
                opreg = (modrm & 7) | REX_B(s);
4302
            }
4303

    
4304
            /* simpler op */
4305
            if (shift == 0) {
4306
                gen_shift(s, op, ot, opreg, OR_ECX);
4307
            } else {
4308
                if (shift == 2) {
4309
                    shift = ldub_code(s->pc++);
4310
                }
4311
                gen_shifti(s, op, ot, opreg, shift);
4312
            }
4313
        }
4314
        break;
4315
    case 0xd0:
4316
    case 0xd1:
4317
        /* shift Ev,1 */
4318
        shift = 1;
4319
        goto grp2;
4320
    case 0xd2:
4321
    case 0xd3:
4322
        /* shift Ev,cl */
4323
        shift = 0;
4324
        goto grp2;
4325

    
4326
    case 0x1a4: /* shld imm */
4327
        op = 0;
4328
        shift = 1;
4329
        goto do_shiftd;
4330
    case 0x1a5: /* shld cl */
4331
        op = 0;
4332
        shift = 0;
4333
        goto do_shiftd;
4334
    case 0x1ac: /* shrd imm */
4335
        op = 1;
4336
        shift = 1;
4337
        goto do_shiftd;
4338
    case 0x1ad: /* shrd cl */
4339
        op = 1;
4340
        shift = 0;
4341
    do_shiftd:
4342
        ot = dflag + OT_WORD;
4343
        modrm = ldub_code(s->pc++);
4344
        mod = (modrm >> 6) & 3;
4345
        rm = (modrm & 7) | REX_B(s);
4346
        reg = ((modrm >> 3) & 7) | rex_r;
4347

    
4348
        if (mod != 3) {
4349
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4350
            gen_op_ld_T0_A0(ot + s->mem_index);
4351
        } else {
4352
            gen_op_mov_TN_reg(ot, 0, rm);
4353
        }
4354
        gen_op_mov_TN_reg(ot, 1, reg);
4355

    
4356
        if (shift) {
4357
            val = ldub_code(s->pc++);
4358
            if (ot == OT_QUAD)
4359
                val &= 0x3f;
4360
            else
4361
                val &= 0x1f;
4362
            if (val) {
4363
                if (mod == 3)
4364
                    gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4365
                else
4366
                    gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4367
                if (op == 0 && ot != OT_WORD)
4368
                    s->cc_op = CC_OP_SHLB + ot;
4369
                else
4370
                    s->cc_op = CC_OP_SARB + ot;
4371
            }
4372
        } else {
4373
            if (s->cc_op != CC_OP_DYNAMIC)
4374
                gen_op_set_cc_op(s->cc_op);
4375
            if (mod == 3)
4376
                gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4377
            else
4378
                gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4379
            s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4380
        }
4381
        if (mod == 3) {
4382
            gen_op_mov_reg_T0(ot, rm);
4383
        }
4384
        break;
4385

    
4386
        /************************/
4387
        /* floats */
4388
    case 0xd8 ... 0xdf:
4389
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4390
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4391
            /* XXX: what to do if illegal op ? */
4392
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4393
            break;
4394
        }
4395
        modrm = ldub_code(s->pc++);
4396
        mod = (modrm >> 6) & 3;
4397
        rm = modrm & 7;
4398
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4399
        if (mod != 3) {
4400
            /* memory op */
4401
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4402
            switch(op) {
4403
            case 0x00 ... 0x07: /* fxxxs */
4404
            case 0x10 ... 0x17: /* fixxxl */
4405
            case 0x20 ... 0x27: /* fxxxl */
4406
            case 0x30 ... 0x37: /* fixxx */
4407
                {
4408
                    int op1;
4409
                    op1 = op & 7;
4410

    
4411
                    switch(op >> 4) {
4412
                    case 0:
4413
                        gen_op_flds_FT0_A0();
4414
                        break;
4415
                    case 1:
4416
                        gen_op_fildl_FT0_A0();
4417
                        break;
4418
                    case 2:
4419
                        gen_op_fldl_FT0_A0();
4420
                        break;
4421
                    case 3:
4422
                    default:
4423
                        gen_op_fild_FT0_A0();
4424
                        break;
4425
                    }
4426

    
4427
                    gen_op_fp_arith_ST0_FT0[op1]();
4428
                    if (op1 == 3) {
4429
                        /* fcomp needs pop */
4430
                        gen_op_fpop();
4431
                    }
4432
                }
4433
                break;
4434
            case 0x08: /* flds */
4435
            case 0x0a: /* fsts */
4436
            case 0x0b: /* fstps */
4437
            case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4438
            case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4439
            case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4440
                switch(op & 7) {
4441
                case 0:
4442
                    switch(op >> 4) {
4443
                    case 0:
4444
                        gen_op_flds_ST0_A0();
4445
                        break;
4446
                    case 1:
4447
                        gen_op_fildl_ST0_A0();
4448
                        break;
4449
                    case 2:
4450
                        gen_op_fldl_ST0_A0();
4451
                        break;
4452
                    case 3:
4453
                    default:
4454
                        gen_op_fild_ST0_A0();
4455
                        break;
4456
                    }
4457
                    break;
4458
                case 1:
4459
                    switch(op >> 4) {
4460
                    case 1:
4461
                        gen_op_fisttl_ST0_A0();
4462
                        break;
4463
                    case 2:
4464
                        gen_op_fisttll_ST0_A0();
4465
                        break;
4466
                    case 3:
4467
                    default:
4468
                        gen_op_fistt_ST0_A0();
4469
                    }
4470
                    gen_op_fpop();
4471
                    break;
4472
                default:
4473
                    switch(op >> 4) {
4474
                    case 0:
4475
                        gen_op_fsts_ST0_A0();
4476
                        break;
4477
                    case 1:
4478
                        gen_op_fistl_ST0_A0();
4479
                        break;
4480
                    case 2:
4481
                        gen_op_fstl_ST0_A0();
4482
                        break;
4483
                    case 3:
4484
                    default:
4485
                        gen_op_fist_ST0_A0();
4486
                        break;
4487
                    }
4488
                    if ((op & 7) == 3)
4489
                        gen_op_fpop();
4490
                    break;
4491
                }
4492
                break;
4493
            case 0x0c: /* fldenv mem */
4494
                gen_op_fldenv_A0(s->dflag);
4495
                break;
4496
            case 0x0d: /* fldcw mem */
4497
                gen_op_fldcw_A0();
4498
                break;
4499
            case 0x0e: /* fnstenv mem */
4500
                gen_op_fnstenv_A0(s->dflag);
4501
                break;
4502
            case 0x0f: /* fnstcw mem */
4503
                gen_op_fnstcw_A0();
4504
                break;
4505
            case 0x1d: /* fldt mem */
4506
                gen_op_fldt_ST0_A0();
4507
                break;
4508
            case 0x1f: /* fstpt mem */
4509
                gen_op_fstt_ST0_A0();
4510
                gen_op_fpop();
4511
                break;
4512
            case 0x2c: /* frstor mem */
4513
                gen_op_frstor_A0(s->dflag);
4514
                break;
4515
            case 0x2e: /* fnsave mem */
4516
                gen_op_fnsave_A0(s->dflag);
4517
                break;
4518
            case 0x2f: /* fnstsw mem */
4519
                gen_op_fnstsw_A0();
4520
                break;
4521
            case 0x3c: /* fbld */
4522
                gen_op_fbld_ST0_A0();
4523
                break;
4524
            case 0x3e: /* fbstp */
4525
                gen_op_fbst_ST0_A0();
4526
                gen_op_fpop();
4527
                break;
4528
            case 0x3d: /* fildll */
4529
                gen_op_fildll_ST0_A0();
4530
                break;
4531
            case 0x3f: /* fistpll */
4532
                gen_op_fistll_ST0_A0();
4533
                gen_op_fpop();
4534
                break;
4535
            default:
4536
                goto illegal_op;
4537
            }
4538
        } else {
4539
            /* register float ops */
4540
            opreg = rm;
4541

    
4542
            switch(op) {
4543
            case 0x08: /* fld sti */
4544
                gen_op_fpush();
4545
                gen_op_fmov_ST0_STN((opreg + 1) & 7);
4546
                break;
4547
            case 0x09: /* fxchg sti */
4548
            case 0x29: /* fxchg4 sti, undocumented op */
4549
            case 0x39: /* fxchg7 sti, undocumented op */
4550
                gen_op_fxchg_ST0_STN(opreg);
4551
                break;
4552
            case 0x0a: /* grp d9/2 */
4553
                switch(rm) {
4554
                case 0: /* fnop */
4555
                    /* check exceptions (FreeBSD FPU probe) */
4556
                    if (s->cc_op != CC_OP_DYNAMIC)
4557
                        gen_op_set_cc_op(s->cc_op);
4558
                    gen_jmp_im(pc_start - s->cs_base);
4559
                    gen_op_fwait();
4560
                    break;
4561
                default:
4562
                    goto illegal_op;
4563
                }
4564
                break;
4565
            case 0x0c: /* grp d9/4 */
4566
                switch(rm) {
4567
                case 0: /* fchs */
4568
                    gen_op_fchs_ST0();
4569
                    break;
4570
                case 1: /* fabs */
4571
                    gen_op_fabs_ST0();
4572
                    break;
4573
                case 4: /* ftst */
4574
                    gen_op_fldz_FT0();
4575
                    gen_op_fcom_ST0_FT0();
4576
                    break;
4577
                case 5: /* fxam */
4578
                    gen_op_fxam_ST0();
4579
                    break;
4580
                default:
4581
                    goto illegal_op;
4582
                }
4583
                break;
4584
            case 0x0d: /* grp d9/5 */
4585
                {
4586
                    switch(rm) {
4587
                    case 0:
4588
                        gen_op_fpush();
4589
                        gen_op_fld1_ST0();
4590
                        break;
4591
                    case 1:
4592
                        gen_op_fpush();
4593
                        gen_op_fldl2t_ST0();
4594
                        break;
4595
                    case 2:
4596
                        gen_op_fpush();
4597
                        gen_op_fldl2e_ST0();
4598
                        break;
4599
                    case 3:
4600
                        gen_op_fpush();
4601
                        gen_op_fldpi_ST0();
4602
                        break;
4603
                    case 4:
4604
                        gen_op_fpush();
4605
                        gen_op_fldlg2_ST0();
4606
                        break;
4607
                    case 5:
4608
                        gen_op_fpush();
4609
                        gen_op_fldln2_ST0();
4610
                        break;
4611
                    case 6:
4612
                        gen_op_fpush();
4613
                        gen_op_fldz_ST0();
4614
                        break;
4615
                    default:
4616
                        goto illegal_op;
4617
                    }
4618
                }
4619
                break;
4620
            case 0x0e: /* grp d9/6 */
4621
                switch(rm) {
4622
                case 0: /* f2xm1 */
4623
                    gen_op_f2xm1();
4624
                    break;
4625
                case 1: /* fyl2x */
4626
                    gen_op_fyl2x();
4627
                    break;
4628
                case 2: /* fptan */
4629
                    gen_op_fptan();
4630
                    break;
4631
                case 3: /* fpatan */
4632
                    gen_op_fpatan();
4633
                    break;
4634
                case 4: /* fxtract */
4635
                    gen_op_fxtract();
4636
                    break;
4637
                case 5: /* fprem1 */
4638
                    gen_op_fprem1();
4639
                    break;
4640
                case 6: /* fdecstp */
4641
                    gen_op_fdecstp();
4642
                    break;
4643
                default:
4644
                case 7: /* fincstp */
4645
                    gen_op_fincstp();
4646
                    break;
4647
                }
4648
                break;
4649
            case 0x0f: /* grp d9/7 */
4650
                switch(rm) {
4651
                case 0: /* fprem */
4652
                    gen_op_fprem();
4653
                    break;
4654
                case 1: /* fyl2xp1 */
4655
                    gen_op_fyl2xp1();
4656
                    break;
4657
                case 2: /* fsqrt */
4658
                    gen_op_fsqrt();
4659
                    break;
4660
                case 3: /* fsincos */
4661
                    gen_op_fsincos();
4662
                    break;
4663
                case 5: /* fscale */
4664
                    gen_op_fscale();
4665
                    break;
4666
                case 4: /* frndint */
4667
                    gen_op_frndint();
4668
                    break;
4669
                case 6: /* fsin */
4670
                    gen_op_fsin();
4671
                    break;
4672
                default:
4673
                case 7: /* fcos */
4674
                    gen_op_fcos();
4675
                    break;
4676
                }
4677
                break;
4678
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4679
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4680
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4681
                {
4682
                    int op1;
4683

    
4684
                    op1 = op & 7;
4685
                    if (op >= 0x20) {
4686
                        gen_op_fp_arith_STN_ST0[op1](opreg);
4687
                        if (op >= 0x30)
4688
                            gen_op_fpop();
4689
                    } else {
4690
                        gen_op_fmov_FT0_STN(opreg);
4691
                        gen_op_fp_arith_ST0_FT0[op1]();
4692
                    }
4693
                }
4694
                break;
4695
            case 0x02: /* fcom */
4696
            case 0x22: /* fcom2, undocumented op */
4697
                gen_op_fmov_FT0_STN(opreg);
4698
                gen_op_fcom_ST0_FT0();
4699
                break;
4700
            case 0x03: /* fcomp */
4701
            case 0x23: /* fcomp3, undocumented op */
4702
            case 0x32: /* fcomp5, undocumented op */
4703
                gen_op_fmov_FT0_STN(opreg);
4704
                gen_op_fcom_ST0_FT0();
4705
                gen_op_fpop();
4706
                break;
4707
            case 0x15: /* da/5 */
4708
                switch(rm) {
4709
                case 1: /* fucompp */
4710
                    gen_op_fmov_FT0_STN(1);
4711
                    gen_op_fucom_ST0_FT0();
4712
                    gen_op_fpop();
4713
                    gen_op_fpop();
4714
                    break;
4715
                default:
4716
                    goto illegal_op;
4717
                }
4718
                break;
4719
            case 0x1c:
4720
                switch(rm) {
4721
                case 0: /* feni (287 only, just do nop here) */
4722
                    break;
4723
                case 1: /* fdisi (287 only, just do nop here) */
4724
                    break;
4725
                case 2: /* fclex */
4726
                    gen_op_fclex();
4727
                    break;
4728
                case 3: /* fninit */
4729
                    gen_op_fninit();
4730
                    break;
4731
                case 4: /* fsetpm (287 only, just do nop here) */
4732
                    break;
4733
                default:
4734
                    goto illegal_op;
4735
                }
4736
                break;
4737
            case 0x1d: /* fucomi */
4738
                if (s->cc_op != CC_OP_DYNAMIC)
4739
                    gen_op_set_cc_op(s->cc_op);
4740
                gen_op_fmov_FT0_STN(opreg);
4741
                gen_op_fucomi_ST0_FT0();
4742
                s->cc_op = CC_OP_EFLAGS;
4743
                break;
4744
            case 0x1e: /* fcomi */
4745
                if (s->cc_op != CC_OP_DYNAMIC)
4746
                    gen_op_set_cc_op(s->cc_op);
4747
                gen_op_fmov_FT0_STN(opreg);
4748
                gen_op_fcomi_ST0_FT0();
4749
                s->cc_op = CC_OP_EFLAGS;
4750
                break;
4751
            case 0x28: /* ffree sti */
4752
                gen_op_ffree_STN(opreg);
4753
                break;
4754
            case 0x2a: /* fst sti */
4755
                gen_op_fmov_STN_ST0(opreg);
4756
                break;
4757
            case 0x2b: /* fstp sti */
4758
            case 0x0b: /* fstp1 sti, undocumented op */
4759
            case 0x3a: /* fstp8 sti, undocumented op */
4760
            case 0x3b: /* fstp9 sti, undocumented op */
4761
                gen_op_fmov_STN_ST0(opreg);
4762
                gen_op_fpop();
4763
                break;
4764
            case 0x2c: /* fucom st(i) */
4765
                gen_op_fmov_FT0_STN(opreg);
4766
                gen_op_fucom_ST0_FT0();
4767
                break;
4768
            case 0x2d: /* fucomp st(i) */
4769
                gen_op_fmov_FT0_STN(opreg);
4770
                gen_op_fucom_ST0_FT0();
4771
                gen_op_fpop();
4772
                break;
4773
            case 0x33: /* de/3 */
4774
                switch(rm) {
4775
                case 1: /* fcompp */
4776
                    gen_op_fmov_FT0_STN(1);
4777
                    gen_op_fcom_ST0_FT0();
4778
                    gen_op_fpop();
4779
                    gen_op_fpop();
4780
                    break;
4781
                default:
4782
                    goto illegal_op;
4783
                }
4784
                break;
4785
            case 0x38: /* ffreep sti, undocumented op */
4786
                gen_op_ffree_STN(opreg);
4787
                gen_op_fpop();
4788
                break;
4789
            case 0x3c: /* df/4 */
4790
                switch(rm) {
4791
                case 0:
4792
                    gen_op_fnstsw_EAX();
4793
                    break;
4794
                default:
4795
                    goto illegal_op;
4796
                }
4797
                break;
4798
            case 0x3d: /* fucomip */
4799
                if (s->cc_op != CC_OP_DYNAMIC)
4800
                    gen_op_set_cc_op(s->cc_op);
4801
                gen_op_fmov_FT0_STN(opreg);
4802
                gen_op_fucomi_ST0_FT0();
4803
                gen_op_fpop();
4804
                s->cc_op = CC_OP_EFLAGS;
4805
                break;
4806
            case 0x3e: /* fcomip */
4807
                if (s->cc_op != CC_OP_DYNAMIC)
4808
                    gen_op_set_cc_op(s->cc_op);
4809
                gen_op_fmov_FT0_STN(opreg);
4810
                gen_op_fcomi_ST0_FT0();
4811
                gen_op_fpop();
4812
                s->cc_op = CC_OP_EFLAGS;
4813
                break;
4814
            case 0x10 ... 0x13: /* fcmovxx */
4815
            case 0x18 ... 0x1b:
4816
                {
4817
                    int op1;
4818
                    const static uint8_t fcmov_cc[8] = {
4819
                        (JCC_B << 1),
4820
                        (JCC_Z << 1),
4821
                        (JCC_BE << 1),
4822
                        (JCC_P << 1),
4823
                    };
4824
                    op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
4825
                    gen_setcc(s, op1);
4826
                    gen_op_fcmov_ST0_STN_T0(opreg);
4827
                }
4828
                break;
4829
            default:
4830
                goto illegal_op;
4831
            }
4832
        }
4833
        break;
4834
        /************************/
4835
        /* string ops */
4836

    
4837
    case 0xa4: /* movsS */
4838
    case 0xa5:
4839
        if ((b & 1) == 0)
4840
            ot = OT_BYTE;
4841
        else
4842
            ot = dflag + OT_WORD;
4843

    
4844
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4845
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4846
        } else {
4847
            gen_movs(s, ot);
4848
        }
4849
        break;
4850

    
4851
    case 0xaa: /* stosS */
4852
    case 0xab:
4853
        if ((b & 1) == 0)
4854
            ot = OT_BYTE;
4855
        else
4856
            ot = dflag + OT_WORD;
4857

    
4858
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4859
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4860
        } else {
4861
            gen_stos(s, ot);
4862
        }
4863
        break;
4864
    case 0xac: /* lodsS */
4865
    case 0xad:
4866
        if ((b & 1) == 0)
4867
            ot = OT_BYTE;
4868
        else
4869
            ot = dflag + OT_WORD;
4870
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4871
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4872
        } else {
4873
            gen_lods(s, ot);
4874
        }
4875
        break;
4876
    case 0xae: /* scasS */
4877
    case 0xaf:
4878
        if ((b & 1) == 0)
4879
            ot = OT_BYTE;
4880
        else
4881
            ot = dflag + OT_WORD;
4882
        if (prefixes & PREFIX_REPNZ) {
4883
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4884
        } else if (prefixes & PREFIX_REPZ) {
4885
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4886
        } else {
4887
            gen_scas(s, ot);
4888
            s->cc_op = CC_OP_SUBB + ot;
4889
        }
4890
        break;
4891

    
4892
    case 0xa6: /* cmpsS */
4893
    case 0xa7:
4894
        if ((b & 1) == 0)
4895
            ot = OT_BYTE;
4896
        else
4897
            ot = dflag + OT_WORD;
4898
        if (prefixes & PREFIX_REPNZ) {
4899
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4900
        } else if (prefixes & PREFIX_REPZ) {
4901
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4902
        } else {
4903
            gen_cmps(s, ot);
4904
            s->cc_op = CC_OP_SUBB + ot;
4905
        }
4906
        break;
4907
    case 0x6c: /* insS */
4908
    case 0x6d:
4909
        if ((b & 1) == 0)
4910
            ot = OT_BYTE;
4911
        else
4912
            ot = dflag ? OT_LONG : OT_WORD;
4913
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
4914
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
4915
        gen_op_andl_T0_ffff();
4916
        if (gen_svm_check_io(s, pc_start,
4917
                             SVM_IOIO_TYPE_MASK | (1 << (4+ot)) |
4918
                             svm_is_rep(prefixes) | 4 | (1 << (7+s->aflag))))
4919
            break;
4920
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4921
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4922
        } else {
4923
            gen_ins(s, ot);
4924
        }
4925
        break;
4926
    case 0x6e: /* outsS */
4927
    case 0x6f:
4928
        if ((b & 1) == 0)
4929
            ot = OT_BYTE;
4930
        else
4931
            ot = dflag ? OT_LONG : OT_WORD;
4932
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
4933
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
4934
        gen_op_andl_T0_ffff();
4935
        if (gen_svm_check_io(s, pc_start,
4936
                             (1 << (4+ot)) | svm_is_rep(prefixes) |
4937
                             4 | (1 << (7+s->aflag))))
4938
            break;
4939
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4940
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4941
        } else {
4942
            gen_outs(s, ot);
4943
        }
4944
        break;
4945

    
4946
        /************************/
4947
        /* port I/O */
4948

    
4949
    case 0xe4:
4950
    case 0xe5:
4951
        if ((b & 1) == 0)
4952
            ot = OT_BYTE;
4953
        else
4954
            ot = dflag ? OT_LONG : OT_WORD;
4955
        val = ldub_code(s->pc++);
4956
        gen_op_movl_T0_im(val);
4957
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
4958
        if (gen_svm_check_io(s, pc_start,
4959
                             SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
4960
                             (1 << (4+ot))))
4961
            break;
4962
        gen_op_in[ot]();
4963
        gen_op_mov_reg_T1(ot, R_EAX);
4964
        break;
4965
    case 0xe6:
4966
    case 0xe7:
4967
        if ((b & 1) == 0)
4968
            ot = OT_BYTE;
4969
        else
4970
            ot = dflag ? OT_LONG : OT_WORD;
4971
        val = ldub_code(s->pc++);
4972
        gen_op_movl_T0_im(val);
4973
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
4974
        if (gen_svm_check_io(s, pc_start, svm_is_rep(prefixes) |
4975
                             (1 << (4+ot))))
4976
            break;
4977
        gen_op_mov_TN_reg(ot, 1, R_EAX);
4978
        gen_op_out[ot]();
4979
        break;
4980
    case 0xec:
4981
    case 0xed:
4982
        if ((b & 1) == 0)
4983
            ot = OT_BYTE;
4984
        else
4985
            ot = dflag ? OT_LONG : OT_WORD;
4986
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
4987
        gen_op_andl_T0_ffff();
4988
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
4989
        if (gen_svm_check_io(s, pc_start,
4990
                             SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
4991
                             (1 << (4+ot))))
4992
            break;
4993
        gen_op_in[ot]();
4994
        gen_op_mov_reg_T1(ot, R_EAX);
4995
        break;
4996
    case 0xee:
4997
    case 0xef:
4998
        if ((b & 1) == 0)
4999
            ot = OT_BYTE;
5000
        else
5001
            ot = dflag ? OT_LONG : OT_WORD;
5002
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5003
        gen_op_andl_T0_ffff();
5004
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
5005
        if (gen_svm_check_io(s, pc_start,
5006
                             svm_is_rep(prefixes) | (1 << (4+ot))))
5007
            break;
5008
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5009
        gen_op_out[ot]();
5010
        break;
5011

    
5012
        /************************/
5013
        /* control */
5014
    case 0xc2: /* ret im */
5015
        val = ldsw_code(s->pc);
5016
        s->pc += 2;
5017
        gen_pop_T0(s);
5018
        if (CODE64(s) && s->dflag)
5019
            s->dflag = 2;
5020
        gen_stack_update(s, val + (2 << s->dflag));
5021
        if (s->dflag == 0)
5022
            gen_op_andl_T0_ffff();
5023
        gen_op_jmp_T0();
5024
        gen_eob(s);
5025
        break;
5026
    case 0xc3: /* ret */
5027
        gen_pop_T0(s);
5028
        gen_pop_update(s);
5029
        if (s->dflag == 0)
5030
            gen_op_andl_T0_ffff();
5031
        gen_op_jmp_T0();
5032
        gen_eob(s);
5033
        break;
5034
    case 0xca: /* lret im */
5035
        val = ldsw_code(s->pc);
5036
        s->pc += 2;
5037
    do_lret:
5038
        if (s->pe && !s->vm86) {
5039
            if (s->cc_op != CC_OP_DYNAMIC)
5040
                gen_op_set_cc_op(s->cc_op);
5041
            gen_jmp_im(pc_start - s->cs_base);
5042
            gen_op_lret_protected(s->dflag, val);
5043
        } else {
5044
            gen_stack_A0(s);
5045
            /* pop offset */
5046
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5047
            if (s->dflag == 0)
5048
                gen_op_andl_T0_ffff();
5049
            /* NOTE: keeping EIP updated is not a problem in case of
5050
               exception */
5051
            gen_op_jmp_T0();
5052
            /* pop selector */
5053
            gen_op_addl_A0_im(2 << s->dflag);
5054
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5055
            gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5056
            /* add stack offset */
5057
            gen_stack_update(s, val + (4 << s->dflag));
5058
        }
5059
        gen_eob(s);
5060
        break;
5061
    case 0xcb: /* lret */
5062
        val = 0;
5063
        goto do_lret;
5064
    case 0xcf: /* iret */
5065
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5066
            break;
5067
        if (!s->pe) {
5068
            /* real mode */
5069
            gen_op_iret_real(s->dflag);
5070
            s->cc_op = CC_OP_EFLAGS;
5071
        } else if (s->vm86) {
5072
            if (s->iopl != 3) {
5073
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5074
            } else {
5075
                gen_op_iret_real(s->dflag);
5076
                s->cc_op = CC_OP_EFLAGS;
5077
            }
5078
        } else {
5079
            if (s->cc_op != CC_OP_DYNAMIC)
5080
                gen_op_set_cc_op(s->cc_op);
5081
            gen_jmp_im(pc_start - s->cs_base);
5082
            gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
5083
            s->cc_op = CC_OP_EFLAGS;
5084
        }
5085
        gen_eob(s);
5086
        break;
5087
    case 0xe8: /* call im */
5088
        {
5089
            if (dflag)
5090
                tval = (int32_t)insn_get(s, OT_LONG);
5091
            else
5092
                tval = (int16_t)insn_get(s, OT_WORD);
5093
            next_eip = s->pc - s->cs_base;
5094
            tval += next_eip;
5095
            if (s->dflag == 0)
5096
                tval &= 0xffff;
5097
            gen_movtl_T0_im(next_eip);
5098
            gen_push_T0(s);
5099
            gen_jmp(s, tval);
5100
        }
5101
        break;
5102
    case 0x9a: /* lcall im */
5103
        {
5104
            unsigned int selector, offset;
5105

    
5106
            if (CODE64(s))
5107
                goto illegal_op;
5108
            ot = dflag ? OT_LONG : OT_WORD;
5109
            offset = insn_get(s, ot);
5110
            selector = insn_get(s, OT_WORD);
5111

    
5112
            gen_op_movl_T0_im(selector);
5113
            gen_op_movl_T1_imu(offset);
5114
        }
5115
        goto do_lcall;
5116
    case 0xe9: /* jmp im */
5117
        if (dflag)
5118
            tval = (int32_t)insn_get(s, OT_LONG);
5119
        else
5120
            tval = (int16_t)insn_get(s, OT_WORD);
5121
        tval += s->pc - s->cs_base;
5122
        if (s->dflag == 0)
5123
            tval &= 0xffff;
5124
        gen_jmp(s, tval);
5125
        break;
5126
    case 0xea: /* ljmp im */
5127
        {
5128
            unsigned int selector, offset;
5129

    
5130
            if (CODE64(s))
5131
                goto illegal_op;
5132
            ot = dflag ? OT_LONG : OT_WORD;
5133
            offset = insn_get(s, ot);
5134
            selector = insn_get(s, OT_WORD);
5135

    
5136
            gen_op_movl_T0_im(selector);
5137
            gen_op_movl_T1_imu(offset);
5138
        }
5139
        goto do_ljmp;
5140
    case 0xeb: /* jmp Jb */
5141
        tval = (int8_t)insn_get(s, OT_BYTE);
5142
        tval += s->pc - s->cs_base;
5143
        if (s->dflag == 0)
5144
            tval &= 0xffff;
5145
        gen_jmp(s, tval);
5146
        break;
5147
    case 0x70 ... 0x7f: /* jcc Jb */
5148
        tval = (int8_t)insn_get(s, OT_BYTE);
5149
        goto do_jcc;
5150
    case 0x180 ... 0x18f: /* jcc Jv */
5151
        if (dflag) {
5152
            tval = (int32_t)insn_get(s, OT_LONG);
5153
        } else {
5154
            tval = (int16_t)insn_get(s, OT_WORD);
5155
        }
5156
    do_jcc:
5157
        next_eip = s->pc - s->cs_base;
5158
        tval += next_eip;
5159
        if (s->dflag == 0)
5160
            tval &= 0xffff;
5161
        gen_jcc(s, b, tval, next_eip);
5162
        break;
5163

    
5164
    case 0x190 ... 0x19f: /* setcc Gv */
5165
        modrm = ldub_code(s->pc++);
5166
        gen_setcc(s, b);
5167
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5168
        break;
5169
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
5170
        ot = dflag + OT_WORD;
5171
        modrm = ldub_code(s->pc++);
5172
        reg = ((modrm >> 3) & 7) | rex_r;
5173
        mod = (modrm >> 6) & 3;
5174
        gen_setcc(s, b);
5175
        if (mod != 3) {
5176
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5177
            gen_op_ld_T1_A0(ot + s->mem_index);
5178
        } else {
5179
            rm = (modrm & 7) | REX_B(s);
5180
            gen_op_mov_TN_reg(ot, 1, rm);
5181
        }
5182
        gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5183
        break;
5184

    
5185
        /************************/
5186
        /* flags */
5187
    case 0x9c: /* pushf */
5188
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5189
            break;
5190
        if (s->vm86 && s->iopl != 3) {
5191
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5192
        } else {
5193
            if (s->cc_op != CC_OP_DYNAMIC)
5194
                gen_op_set_cc_op(s->cc_op);
5195
            gen_op_movl_T0_eflags();
5196
            gen_push_T0(s);
5197
        }
5198
        break;
5199
    case 0x9d: /* popf */
5200
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5201
            break;
5202
        if (s->vm86 && s->iopl != 3) {
5203
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5204
        } else {
5205
            gen_pop_T0(s);
5206
            if (s->cpl == 0) {
5207
                if (s->dflag) {
5208
                    gen_op_movl_eflags_T0_cpl0();
5209
                } else {
5210
                    gen_op_movw_eflags_T0_cpl0();
5211
                }
5212
            } else {
5213
                if (s->cpl <= s->iopl) {
5214
                    if (s->dflag) {
5215
                        gen_op_movl_eflags_T0_io();
5216
                    } else {
5217
                        gen_op_movw_eflags_T0_io();
5218
                    }
5219
                } else {
5220
                    if (s->dflag) {
5221
                        gen_op_movl_eflags_T0();
5222
                    } else {
5223
                        gen_op_movw_eflags_T0();
5224
                    }
5225
                }
5226
            }
5227
            gen_pop_update(s);
5228
            s->cc_op = CC_OP_EFLAGS;
5229
            /* abort translation because TF flag may change */
5230
            gen_jmp_im(s->pc - s->cs_base);
5231
            gen_eob(s);
5232
        }
5233
        break;
5234
    case 0x9e: /* sahf */
5235
        if (CODE64(s))
5236
            goto illegal_op;
5237
        gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5238
        if (s->cc_op != CC_OP_DYNAMIC)
5239
            gen_op_set_cc_op(s->cc_op);
5240
        gen_op_movb_eflags_T0();
5241
        s->cc_op = CC_OP_EFLAGS;
5242
        break;
5243
    case 0x9f: /* lahf */
5244
        if (CODE64(s))
5245
            goto illegal_op;
5246
        if (s->cc_op != CC_OP_DYNAMIC)
5247
            gen_op_set_cc_op(s->cc_op);
5248
        gen_op_movl_T0_eflags();
5249
        gen_op_mov_reg_T0(OT_BYTE, R_AH);
5250
        break;
5251
    case 0xf5: /* cmc */
5252
        if (s->cc_op != CC_OP_DYNAMIC)
5253
            gen_op_set_cc_op(s->cc_op);
5254
        gen_op_cmc();
5255
        s->cc_op = CC_OP_EFLAGS;
5256
        break;
5257
    case 0xf8: /* clc */
5258
        if (s->cc_op != CC_OP_DYNAMIC)
5259
            gen_op_set_cc_op(s->cc_op);
5260
        gen_op_clc();
5261
        s->cc_op = CC_OP_EFLAGS;
5262
        break;
5263
    case 0xf9: /* stc */
5264
        if (s->cc_op != CC_OP_DYNAMIC)
5265
            gen_op_set_cc_op(s->cc_op);
5266
        gen_op_stc();
5267
        s->cc_op = CC_OP_EFLAGS;
5268
        break;
5269
    case 0xfc: /* cld */
5270
        gen_op_cld();
5271
        break;
5272
    case 0xfd: /* std */
5273
        gen_op_std();
5274
        break;
5275

    
5276
        /************************/
5277
        /* bit operations */
5278
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
5279
        ot = dflag + OT_WORD;
5280
        modrm = ldub_code(s->pc++);
5281
        op = (modrm >> 3) & 7;
5282
        mod = (modrm >> 6) & 3;
5283
        rm = (modrm & 7) | REX_B(s);
5284
        if (mod != 3) {
5285
            s->rip_offset = 1;
5286
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5287
            gen_op_ld_T0_A0(ot + s->mem_index);
5288
        } else {
5289
            gen_op_mov_TN_reg(ot, 0, rm);
5290
        }
5291
        /* load shift */
5292
        val = ldub_code(s->pc++);
5293
        gen_op_movl_T1_im(val);
5294
        if (op < 4)
5295
            goto illegal_op;
5296
        op -= 4;
5297
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5298
        s->cc_op = CC_OP_SARB + ot;
5299
        if (op != 0) {
5300
            if (mod != 3)
5301
                gen_op_st_T0_A0(ot + s->mem_index);
5302
            else
5303
                gen_op_mov_reg_T0(ot, rm);
5304
            gen_op_update_bt_cc();
5305
        }
5306
        break;
5307
    case 0x1a3: /* bt Gv, Ev */
5308
        op = 0;
5309
        goto do_btx;
5310
    case 0x1ab: /* bts */
5311
        op = 1;
5312
        goto do_btx;
5313
    case 0x1b3: /* btr */
5314
        op = 2;
5315
        goto do_btx;
5316
    case 0x1bb: /* btc */
5317
        op = 3;
5318
    do_btx:
5319
        ot = dflag + OT_WORD;
5320
        modrm = ldub_code(s->pc++);
5321
        reg = ((modrm >> 3) & 7) | rex_r;
5322
        mod = (modrm >> 6) & 3;
5323
        rm = (modrm & 7) | REX_B(s);
5324
        gen_op_mov_TN_reg(OT_LONG, 1, reg);
5325
        if (mod != 3) {
5326
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5327
            /* specific case: we need to add a displacement */
5328
            gen_op_add_bit_A0_T1[ot - OT_WORD]();
5329
            gen_op_ld_T0_A0(ot + s->mem_index);
5330
        } else {
5331
            gen_op_mov_TN_reg(ot, 0, rm);
5332
        }
5333
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5334
        s->cc_op = CC_OP_SARB + ot;
5335
        if (op != 0) {
5336
            if (mod != 3)
5337
                gen_op_st_T0_A0(ot + s->mem_index);
5338
            else
5339
                gen_op_mov_reg_T0(ot, rm);
5340
            gen_op_update_bt_cc();
5341
        }
5342
        break;
5343
    case 0x1bc: /* bsf */
5344
    case 0x1bd: /* bsr */
5345
        ot = dflag + OT_WORD;
5346
        modrm = ldub_code(s->pc++);
5347
        reg = ((modrm >> 3) & 7) | rex_r;
5348
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5349
        /* NOTE: in order to handle the 0 case, we must load the
5350
           result. It could be optimized with a generated jump */
5351
        gen_op_mov_TN_reg(ot, 1, reg);
5352
        gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5353
        gen_op_mov_reg_T1(ot, reg);
5354
        s->cc_op = CC_OP_LOGICB + ot;
5355
        break;
5356
        /************************/
5357
        /* bcd */
5358
    case 0x27: /* daa */
5359
        if (CODE64(s))
5360
            goto illegal_op;
5361
        if (s->cc_op != CC_OP_DYNAMIC)
5362
            gen_op_set_cc_op(s->cc_op);
5363
        gen_op_daa();
5364
        s->cc_op = CC_OP_EFLAGS;
5365
        break;
5366
    case 0x2f: /* das */
5367
        if (CODE64(s))
5368
            goto illegal_op;
5369
        if (s->cc_op != CC_OP_DYNAMIC)
5370
            gen_op_set_cc_op(s->cc_op);
5371
        gen_op_das();
5372
        s->cc_op = CC_OP_EFLAGS;
5373
        break;
5374
    case 0x37: /* aaa */
5375
        if (CODE64(s))
5376
            goto illegal_op;
5377
        if (s->cc_op != CC_OP_DYNAMIC)
5378
            gen_op_set_cc_op(s->cc_op);
5379
        gen_op_aaa();
5380
        s->cc_op = CC_OP_EFLAGS;
5381
        break;
5382
    case 0x3f: /* aas */
5383
        if (CODE64(s))
5384
            goto illegal_op;
5385
        if (s->cc_op != CC_OP_DYNAMIC)
5386
            gen_op_set_cc_op(s->cc_op);
5387
        gen_op_aas();
5388
        s->cc_op = CC_OP_EFLAGS;
5389
        break;
5390
    case 0xd4: /* aam */
5391
        if (CODE64(s))
5392
            goto illegal_op;
5393
        val = ldub_code(s->pc++);
5394
        if (val == 0) {
5395
            gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5396
        } else {
5397
            gen_op_aam(val);
5398
            s->cc_op = CC_OP_LOGICB;
5399
        }
5400
        break;
5401
    case 0xd5: /* aad */
5402
        if (CODE64(s))
5403
            goto illegal_op;
5404
        val = ldub_code(s->pc++);
5405
        gen_op_aad(val);
5406
        s->cc_op = CC_OP_LOGICB;
5407
        break;
5408
        /************************/
5409
        /* misc */
5410
    case 0x90: /* nop */
5411
        /* XXX: xchg + rex handling */
5412
        /* XXX: correct lock test for all insn */
5413
        if (prefixes & PREFIX_LOCK)
5414
            goto illegal_op;
5415
        if (prefixes & PREFIX_REPZ) {
5416
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
5417
        }
5418
        break;
5419
    case 0x9b: /* fwait */
5420
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5421
            (HF_MP_MASK | HF_TS_MASK)) {
5422
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5423
        } else {
5424
            if (s->cc_op != CC_OP_DYNAMIC)
5425
                gen_op_set_cc_op(s->cc_op);
5426
            gen_jmp_im(pc_start - s->cs_base);
5427
            gen_op_fwait();
5428
        }
5429
        break;
5430
    case 0xcc: /* int3 */
5431
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5432
            break;
5433
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5434
        break;
5435
    case 0xcd: /* int N */
5436
        val = ldub_code(s->pc++);
5437
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5438
            break;
5439
        if (s->vm86 && s->iopl != 3) {
5440
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5441
        } else {
5442
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5443
        }
5444
        break;
5445
    case 0xce: /* into */
5446
        if (CODE64(s))
5447
            goto illegal_op;
5448
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5449
            break;
5450
        if (s->cc_op != CC_OP_DYNAMIC)
5451
            gen_op_set_cc_op(s->cc_op);
5452
        gen_jmp_im(pc_start - s->cs_base);
5453
        gen_op_into(s->pc - pc_start);
5454
        break;
5455
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
5456
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
5457
            break;
5458
#if 1
5459
        gen_debug(s, pc_start - s->cs_base);
5460
#else
5461
        /* start debug */
5462
        tb_flush(cpu_single_env);
5463
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5464
#endif
5465
        break;
5466
    case 0xfa: /* cli */
5467
        if (!s->vm86) {
5468
            if (s->cpl <= s->iopl) {
5469
                gen_op_cli();
5470
            } else {
5471
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5472
            }
5473
        } else {
5474
            if (s->iopl == 3) {
5475
                gen_op_cli();
5476
            } else {
5477
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5478
            }
5479
        }
5480
        break;
5481
    case 0xfb: /* sti */
5482
        if (!s->vm86) {
5483
            if (s->cpl <= s->iopl) {
5484
            gen_sti:
5485
                gen_op_sti();
5486
                /* interruptions are enabled only the first insn after sti */
5487
                /* If several instructions disable interrupts, only the
5488
                   _first_ does it */
5489
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5490
                    gen_op_set_inhibit_irq();
5491
                /* give a chance to handle pending irqs */
5492
                gen_jmp_im(s->pc - s->cs_base);
5493
                gen_eob(s);
5494
            } else {
5495
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5496
            }
5497
        } else {
5498
            if (s->iopl == 3) {
5499
                goto gen_sti;
5500
            } else {
5501
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5502
            }
5503
        }
5504
        break;
5505
    case 0x62: /* bound */
5506
        if (CODE64(s))
5507
            goto illegal_op;
5508
        ot = dflag ? OT_LONG : OT_WORD;
5509
        modrm = ldub_code(s->pc++);
5510
        reg = (modrm >> 3) & 7;
5511
        mod = (modrm >> 6) & 3;
5512
        if (mod == 3)
5513
            goto illegal_op;
5514
        gen_op_mov_TN_reg(ot, 0, reg);
5515
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5516
        gen_jmp_im(pc_start - s->cs_base);
5517
        if (ot == OT_WORD)
5518
            gen_op_boundw();
5519
        else
5520
            gen_op_boundl();
5521
        break;
5522
    case 0x1c8 ... 0x1cf: /* bswap reg */
5523
        reg = (b & 7) | REX_B(s);
5524
#ifdef TARGET_X86_64
5525
        if (dflag == 2) {
5526
            gen_op_mov_TN_reg(OT_QUAD, 0, reg);
5527
            tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
5528
            gen_op_mov_reg_T0(OT_QUAD, reg);
5529
        } else
5530
        {
5531
            TCGv tmp0;
5532
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
5533
            
5534
            tmp0 = tcg_temp_new(TCG_TYPE_I32);
5535
            tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
5536
            tcg_gen_bswap_i32(tmp0, tmp0);
5537
            tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
5538
            gen_op_mov_reg_T0(OT_LONG, reg);
5539
        }
5540
#else
5541
        {
5542
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
5543
            tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
5544
            gen_op_mov_reg_T0(OT_LONG, reg);
5545
        }
5546
#endif
5547
        break;
5548
    case 0xd6: /* salc */
5549
        if (CODE64(s))
5550
            goto illegal_op;
5551
        if (s->cc_op != CC_OP_DYNAMIC)
5552
            gen_op_set_cc_op(s->cc_op);
5553
        gen_op_salc();
5554
        break;
5555
    case 0xe0: /* loopnz */
5556
    case 0xe1: /* loopz */
5557
        if (s->cc_op != CC_OP_DYNAMIC)
5558
            gen_op_set_cc_op(s->cc_op);
5559
        /* FALL THRU */
5560
    case 0xe2: /* loop */
5561
    case 0xe3: /* jecxz */
5562
        {
5563
            int l1, l2;
5564

    
5565
            tval = (int8_t)insn_get(s, OT_BYTE);
5566
            next_eip = s->pc - s->cs_base;
5567
            tval += next_eip;
5568
            if (s->dflag == 0)
5569
                tval &= 0xffff;
5570

    
5571
            l1 = gen_new_label();
5572
            l2 = gen_new_label();
5573
            b &= 3;
5574
            if (b == 3) {
5575
                gen_op_jz_ecx[s->aflag](l1);
5576
            } else {
5577
                gen_op_dec_ECX[s->aflag]();
5578
                if (b <= 1)
5579
                    gen_op_mov_T0_cc();
5580
                gen_op_loop[s->aflag][b](l1);
5581
            }
5582

    
5583
            gen_jmp_im(next_eip);
5584
            gen_op_jmp_label(l2);
5585
            gen_set_label(l1);
5586
            gen_jmp_im(tval);
5587
            gen_set_label(l2);
5588
            gen_eob(s);
5589
        }
5590
        break;
5591
    case 0x130: /* wrmsr */
5592
    case 0x132: /* rdmsr */
5593
        if (s->cpl != 0) {
5594
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5595
        } else {
5596
            int retval = 0;
5597
            if (b & 2) {
5598
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
5599
                gen_op_rdmsr();
5600
            } else {
5601
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
5602
                gen_op_wrmsr();
5603
            }
5604
            if(retval)
5605
                gen_eob(s);
5606
        }
5607
        break;
5608
    case 0x131: /* rdtsc */
5609
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
5610
            break;
5611
        gen_jmp_im(pc_start - s->cs_base);
5612
        gen_op_rdtsc();
5613
        break;
5614
    case 0x133: /* rdpmc */
5615
        gen_jmp_im(pc_start - s->cs_base);
5616
        gen_op_rdpmc();
5617
        break;
5618
    case 0x134: /* sysenter */
5619
        if (CODE64(s))
5620
            goto illegal_op;
5621
        if (!s->pe) {
5622
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5623
        } else {
5624
            if (s->cc_op != CC_OP_DYNAMIC) {
5625
                gen_op_set_cc_op(s->cc_op);
5626
                s->cc_op = CC_OP_DYNAMIC;
5627
            }
5628
            gen_jmp_im(pc_start - s->cs_base);
5629
            gen_op_sysenter();
5630
            gen_eob(s);
5631
        }
5632
        break;
5633
    case 0x135: /* sysexit */
5634
        if (CODE64(s))
5635
            goto illegal_op;
5636
        if (!s->pe) {
5637
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5638
        } else {
5639
            if (s->cc_op != CC_OP_DYNAMIC) {
5640
                gen_op_set_cc_op(s->cc_op);
5641
                s->cc_op = CC_OP_DYNAMIC;
5642
            }
5643
            gen_jmp_im(pc_start - s->cs_base);
5644
            gen_op_sysexit();
5645
            gen_eob(s);
5646
        }
5647
        break;
5648
#ifdef TARGET_X86_64
5649
    case 0x105: /* syscall */
5650
        /* XXX: is it usable in real mode ? */
5651
        if (s->cc_op != CC_OP_DYNAMIC) {
5652
            gen_op_set_cc_op(s->cc_op);
5653
            s->cc_op = CC_OP_DYNAMIC;
5654
        }
5655
        gen_jmp_im(pc_start - s->cs_base);
5656
        gen_op_syscall(s->pc - pc_start);
5657
        gen_eob(s);
5658
        break;
5659
    case 0x107: /* sysret */
5660
        if (!s->pe) {
5661
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5662
        } else {
5663
            if (s->cc_op != CC_OP_DYNAMIC) {
5664
                gen_op_set_cc_op(s->cc_op);
5665
                s->cc_op = CC_OP_DYNAMIC;
5666
            }
5667
            gen_jmp_im(pc_start - s->cs_base);
5668
            gen_op_sysret(s->dflag);
5669
            /* condition codes are modified only in long mode */
5670
            if (s->lma)
5671
                s->cc_op = CC_OP_EFLAGS;
5672
            gen_eob(s);
5673
        }
5674
        break;
5675
#endif
5676
    case 0x1a2: /* cpuid */
5677
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
5678
            break;
5679
        gen_op_cpuid();
5680
        break;
5681
    case 0xf4: /* hlt */
5682
        if (s->cpl != 0) {
5683
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5684
        } else {
5685
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
5686
                break;
5687
            if (s->cc_op != CC_OP_DYNAMIC)
5688
                gen_op_set_cc_op(s->cc_op);
5689
            gen_jmp_im(s->pc - s->cs_base);
5690
            gen_op_hlt();
5691
            s->is_jmp = 3;
5692
        }
5693
        break;
5694
    case 0x100:
5695
        modrm = ldub_code(s->pc++);
5696
        mod = (modrm >> 6) & 3;
5697
        op = (modrm >> 3) & 7;
5698
        switch(op) {
5699
        case 0: /* sldt */
5700
            if (!s->pe || s->vm86)
5701
                goto illegal_op;
5702
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
5703
                break;
5704
            gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5705
            ot = OT_WORD;
5706
            if (mod == 3)
5707
                ot += s->dflag;
5708
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5709
            break;
5710
        case 2: /* lldt */
5711
            if (!s->pe || s->vm86)
5712
                goto illegal_op;
5713
            if (s->cpl != 0) {
5714
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5715
            } else {
5716
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
5717
                    break;
5718
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5719
                gen_jmp_im(pc_start - s->cs_base);
5720
                gen_op_lldt_T0();
5721
            }
5722
            break;
5723
        case 1: /* str */
5724
            if (!s->pe || s->vm86)
5725
                goto illegal_op;
5726
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
5727
                break;
5728
            gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5729
            ot = OT_WORD;
5730
            if (mod == 3)
5731
                ot += s->dflag;
5732
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5733
            break;
5734
        case 3: /* ltr */
5735
            if (!s->pe || s->vm86)
5736
                goto illegal_op;
5737
            if (s->cpl != 0) {
5738
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5739
            } else {
5740
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
5741
                    break;
5742
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5743
                gen_jmp_im(pc_start - s->cs_base);
5744
                gen_op_ltr_T0();
5745
            }
5746
            break;
5747
        case 4: /* verr */
5748
        case 5: /* verw */
5749
            if (!s->pe || s->vm86)
5750
                goto illegal_op;
5751
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5752
            if (s->cc_op != CC_OP_DYNAMIC)
5753
                gen_op_set_cc_op(s->cc_op);
5754
            if (op == 4)
5755
                gen_op_verr();
5756
            else
5757
                gen_op_verw();
5758
            s->cc_op = CC_OP_EFLAGS;
5759
            break;
5760
        default:
5761
            goto illegal_op;
5762
        }
5763
        break;
5764
    case 0x101:
5765
        modrm = ldub_code(s->pc++);
5766
        mod = (modrm >> 6) & 3;
5767
        op = (modrm >> 3) & 7;
5768
        rm = modrm & 7;
5769
        switch(op) {
5770
        case 0: /* sgdt */
5771
            if (mod == 3)
5772
                goto illegal_op;
5773
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
5774
                break;
5775
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5776
            gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5777
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
5778
            gen_add_A0_im(s, 2);
5779
            gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5780
            if (!s->dflag)
5781
                gen_op_andl_T0_im(0xffffff);
5782
            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5783
            break;
5784
        case 1:
5785
            if (mod == 3) {
5786
                switch (rm) {
5787
                case 0: /* monitor */
5788
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5789
                        s->cpl != 0)
5790
                        goto illegal_op;
5791
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
5792
                        break;
5793
                    gen_jmp_im(pc_start - s->cs_base);
5794
#ifdef TARGET_X86_64
5795
                    if (s->aflag == 2) {
5796
                        gen_op_movq_A0_reg(R_EBX);
5797
                        gen_op_addq_A0_AL();
5798
                    } else
5799
#endif
5800
                    {
5801
                        gen_op_movl_A0_reg(R_EBX);
5802
                        gen_op_addl_A0_AL();
5803
                        if (s->aflag == 0)
5804
                            gen_op_andl_A0_ffff();
5805
                    }
5806
                    gen_add_A0_ds_seg(s);
5807
                    gen_op_monitor();
5808
                    break;
5809
                case 1: /* mwait */
5810
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5811
                        s->cpl != 0)
5812
                        goto illegal_op;
5813
                    if (s->cc_op != CC_OP_DYNAMIC) {
5814
                        gen_op_set_cc_op(s->cc_op);
5815
                        s->cc_op = CC_OP_DYNAMIC;
5816
                    }
5817
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
5818
                        break;
5819
                    gen_jmp_im(s->pc - s->cs_base);
5820
                    gen_op_mwait();
5821
                    gen_eob(s);
5822
                    break;
5823
                default:
5824
                    goto illegal_op;
5825
                }
5826
            } else { /* sidt */
5827
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
5828
                    break;
5829
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5830
                gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
5831
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
5832
                gen_add_A0_im(s, 2);
5833
                gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
5834
                if (!s->dflag)
5835
                    gen_op_andl_T0_im(0xffffff);
5836
                gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5837
            }
5838
            break;
5839
        case 2: /* lgdt */
5840
        case 3: /* lidt */
5841
            if (mod == 3) {
5842
                switch(rm) {
5843
                case 0: /* VMRUN */
5844
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
5845
                        break;
5846
                    if (s->cc_op != CC_OP_DYNAMIC)
5847
                        gen_op_set_cc_op(s->cc_op);
5848
                    gen_jmp_im(s->pc - s->cs_base);
5849
                    gen_op_vmrun();
5850
                    s->cc_op = CC_OP_EFLAGS;
5851
                    gen_eob(s);
5852
                    break;
5853
                case 1: /* VMMCALL */
5854
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
5855
                         break;
5856
                    /* FIXME: cause #UD if hflags & SVM */
5857
                    gen_op_vmmcall();
5858
                    break;
5859
                case 2: /* VMLOAD */
5860
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
5861
                         break;
5862
                    gen_op_vmload();
5863
                    break;
5864
                case 3: /* VMSAVE */
5865
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
5866
                         break;
5867
                    gen_op_vmsave();
5868
                    break;
5869
                case 4: /* STGI */
5870
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
5871
                         break;
5872
                    gen_op_stgi();
5873
                    break;
5874
                case 5: /* CLGI */
5875
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
5876
                         break;
5877
                    gen_op_clgi();
5878
                    break;
5879
                case 6: /* SKINIT */
5880
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
5881
                         break;
5882
                    gen_op_skinit();
5883
                    break;
5884
                case 7: /* INVLPGA */
5885
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
5886
                         break;
5887
                    gen_op_invlpga();
5888
                    break;
5889
                default:
5890
                    goto illegal_op;
5891
                }
5892
            } else if (s->cpl != 0) {
5893
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5894
            } else {
5895
                if (gen_svm_check_intercept(s, pc_start,
5896
                                            op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
5897
                    break;
5898
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5899
                gen_op_ld_T1_A0(OT_WORD + s->mem_index);
5900
                gen_add_A0_im(s, 2);
5901
                gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5902
                if (!s->dflag)
5903
                    gen_op_andl_T0_im(0xffffff);
5904
                if (op == 2) {
5905
                    gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
5906
                    gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
5907
                } else {
5908
                    gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
5909
                    gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
5910
                }
5911
            }
5912
            break;
5913
        case 4: /* smsw */
5914
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
5915
                break;
5916
            gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
5917
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
5918
            break;
5919
        case 6: /* lmsw */
5920
            if (s->cpl != 0) {
5921
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5922
            } else {
5923
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
5924
                    break;
5925
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5926
                gen_op_lmsw_T0();
5927
                gen_jmp_im(s->pc - s->cs_base);
5928
                gen_eob(s);
5929
            }
5930
            break;
5931
        case 7: /* invlpg */
5932
            if (s->cpl != 0) {
5933
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5934
            } else {
5935
                if (mod == 3) {
5936
#ifdef TARGET_X86_64
5937
                    if (CODE64(s) && rm == 0) {
5938
                        /* swapgs */
5939
                        gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
5940
                        gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
5941
                        gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
5942
                        gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
5943
                    } else
5944
#endif
5945
                    {
5946
                        goto illegal_op;
5947
                    }
5948
                } else {
5949
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
5950
                        break;
5951
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5952
                    gen_op_invlpg_A0();
5953
                    gen_jmp_im(s->pc - s->cs_base);
5954
                    gen_eob(s);
5955
                }
5956
            }
5957
            break;
5958
        default:
5959
            goto illegal_op;
5960
        }
5961
        break;
5962
    case 0x108: /* invd */
5963
    case 0x109: /* wbinvd */
5964
        if (s->cpl != 0) {
5965
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5966
        } else {
5967
            if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
5968
                break;
5969
            /* nothing to do */
5970
        }
5971
        break;
5972
    case 0x63: /* arpl or movslS (x86_64) */
5973
#ifdef TARGET_X86_64
5974
        if (CODE64(s)) {
5975
            int d_ot;
5976
            /* d_ot is the size of destination */
5977
            d_ot = dflag + OT_WORD;
5978

    
5979
            modrm = ldub_code(s->pc++);
5980
            reg = ((modrm >> 3) & 7) | rex_r;
5981
            mod = (modrm >> 6) & 3;
5982
            rm = (modrm & 7) | REX_B(s);
5983

    
5984
            if (mod == 3) {
5985
                gen_op_mov_TN_reg(OT_LONG, 0, rm);
5986
                /* sign extend */
5987
                if (d_ot == OT_QUAD)
5988
                    gen_op_movslq_T0_T0();
5989
                gen_op_mov_reg_T0(d_ot, reg);
5990
            } else {
5991
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5992
                if (d_ot == OT_QUAD) {
5993
                    gen_op_lds_T0_A0(OT_LONG + s->mem_index);
5994
                } else {
5995
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5996
                }
5997
                gen_op_mov_reg_T0(d_ot, reg);
5998
            }
5999
        } else
6000
#endif
6001
        {
6002
            if (!s->pe || s->vm86)
6003
                goto illegal_op;
6004
            ot = dflag ? OT_LONG : OT_WORD;
6005
            modrm = ldub_code(s->pc++);
6006
            reg = (modrm >> 3) & 7;
6007
            mod = (modrm >> 6) & 3;
6008
            rm = modrm & 7;
6009
            if (mod != 3) {
6010
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6011
                gen_op_ld_T0_A0(ot + s->mem_index);
6012
            } else {
6013
                gen_op_mov_TN_reg(ot, 0, rm);
6014
            }
6015
            if (s->cc_op != CC_OP_DYNAMIC)
6016
                gen_op_set_cc_op(s->cc_op);
6017
            gen_op_arpl();
6018
            s->cc_op = CC_OP_EFLAGS;
6019
            if (mod != 3) {
6020
                gen_op_st_T0_A0(ot + s->mem_index);
6021
            } else {
6022
                gen_op_mov_reg_T0(ot, rm);
6023
            }
6024
            gen_op_arpl_update();
6025
        }
6026
        break;
6027
    case 0x102: /* lar */
6028
    case 0x103: /* lsl */
6029
        if (!s->pe || s->vm86)
6030
            goto illegal_op;
6031
        ot = dflag ? OT_LONG : OT_WORD;
6032
        modrm = ldub_code(s->pc++);
6033
        reg = ((modrm >> 3) & 7) | rex_r;
6034
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6035
        gen_op_mov_TN_reg(ot, 1, reg);
6036
        if (s->cc_op != CC_OP_DYNAMIC)
6037
            gen_op_set_cc_op(s->cc_op);
6038
        if (b == 0x102)
6039
            gen_op_lar();
6040
        else
6041
            gen_op_lsl();
6042
        s->cc_op = CC_OP_EFLAGS;
6043
        gen_op_mov_reg_T1(ot, reg);
6044
        break;
6045
    case 0x118:
6046
        modrm = ldub_code(s->pc++);
6047
        mod = (modrm >> 6) & 3;
6048
        op = (modrm >> 3) & 7;
6049
        switch(op) {
6050
        case 0: /* prefetchnta */
6051
        case 1: /* prefetchnt0 */
6052
        case 2: /* prefetchnt0 */
6053
        case 3: /* prefetchnt0 */
6054
            if (mod == 3)
6055
                goto illegal_op;
6056
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6057
            /* nothing more to do */
6058
            break;
6059
        default: /* nop (multi byte) */
6060
            gen_nop_modrm(s, modrm);
6061
            break;
6062
        }
6063
        break;
6064
    case 0x119 ... 0x11f: /* nop (multi byte) */
6065
        modrm = ldub_code(s->pc++);
6066
        gen_nop_modrm(s, modrm);
6067
        break;
6068
    case 0x120: /* mov reg, crN */
6069
    case 0x122: /* mov crN, reg */
6070
        if (s->cpl != 0) {
6071
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6072
        } else {
6073
            modrm = ldub_code(s->pc++);
6074
            if ((modrm & 0xc0) != 0xc0)
6075
                goto illegal_op;
6076
            rm = (modrm & 7) | REX_B(s);
6077
            reg = ((modrm >> 3) & 7) | rex_r;
6078
            if (CODE64(s))
6079
                ot = OT_QUAD;
6080
            else
6081
                ot = OT_LONG;
6082
            switch(reg) {
6083
            case 0:
6084
            case 2:
6085
            case 3:
6086
            case 4:
6087
            case 8:
6088
                if (b & 2) {
6089
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6090
                    gen_op_mov_TN_reg(ot, 0, rm);
6091
                    gen_op_movl_crN_T0(reg);
6092
                    gen_jmp_im(s->pc - s->cs_base);
6093
                    gen_eob(s);
6094
                } else {
6095
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6096
#if !defined(CONFIG_USER_ONLY)
6097
                    if (reg == 8)
6098
                        gen_op_movtl_T0_cr8();
6099
                    else
6100
#endif
6101
                        gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6102
                    gen_op_mov_reg_T0(ot, rm);
6103
                }
6104
                break;
6105
            default:
6106
                goto illegal_op;
6107
            }
6108
        }
6109
        break;
6110
    case 0x121: /* mov reg, drN */
6111
    case 0x123: /* mov drN, reg */
6112
        if (s->cpl != 0) {
6113
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6114
        } else {
6115
            modrm = ldub_code(s->pc++);
6116
            if ((modrm & 0xc0) != 0xc0)
6117
                goto illegal_op;
6118
            rm = (modrm & 7) | REX_B(s);
6119
            reg = ((modrm >> 3) & 7) | rex_r;
6120
            if (CODE64(s))
6121
                ot = OT_QUAD;
6122
            else
6123
                ot = OT_LONG;
6124
            /* XXX: do it dynamically with CR4.DE bit */
6125
            if (reg == 4 || reg == 5 || reg >= 8)
6126
                goto illegal_op;
6127
            if (b & 2) {
6128
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6129
                gen_op_mov_TN_reg(ot, 0, rm);
6130
                gen_op_movl_drN_T0(reg);
6131
                gen_jmp_im(s->pc - s->cs_base);
6132
                gen_eob(s);
6133
            } else {
6134
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6135
                gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6136
                gen_op_mov_reg_T0(ot, rm);
6137
            }
6138
        }
6139
        break;
6140
    case 0x106: /* clts */
6141
        if (s->cpl != 0) {
6142
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6143
        } else {
6144
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6145
            gen_op_clts();
6146
            /* abort block because static cpu state changed */
6147
            gen_jmp_im(s->pc - s->cs_base);
6148
            gen_eob(s);
6149
        }
6150
        break;
6151
    /* MMX/SSE/SSE2/PNI support */
6152
    case 0x1c3: /* MOVNTI reg, mem */
6153
        if (!(s->cpuid_features & CPUID_SSE2))
6154
            goto illegal_op;
6155
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6156
        modrm = ldub_code(s->pc++);
6157
        mod = (modrm >> 6) & 3;
6158
        if (mod == 3)
6159
            goto illegal_op;
6160
        reg = ((modrm >> 3) & 7) | rex_r;
6161
        /* generate a generic store */
6162
        gen_ldst_modrm(s, modrm, ot, reg, 1);
6163
        break;
6164
    case 0x1ae:
6165
        modrm = ldub_code(s->pc++);
6166
        mod = (modrm >> 6) & 3;
6167
        op = (modrm >> 3) & 7;
6168
        switch(op) {
6169
        case 0: /* fxsave */
6170
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6171
                (s->flags & HF_EM_MASK))
6172
                goto illegal_op;
6173
            if (s->flags & HF_TS_MASK) {
6174
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6175
                break;
6176
            }
6177
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6178
            gen_op_fxsave_A0((s->dflag == 2));
6179
            break;
6180
        case 1: /* fxrstor */
6181
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6182
                (s->flags & HF_EM_MASK))
6183
                goto illegal_op;
6184
            if (s->flags & HF_TS_MASK) {
6185
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6186
                break;
6187
            }
6188
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6189
            gen_op_fxrstor_A0((s->dflag == 2));
6190
            break;
6191
        case 2: /* ldmxcsr */
6192
        case 3: /* stmxcsr */
6193
            if (s->flags & HF_TS_MASK) {
6194
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6195
                break;
6196
            }
6197
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6198
                mod == 3)
6199
                goto illegal_op;
6200
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6201
            if (op == 2) {
6202
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6203
                gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6204
            } else {
6205
                gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6206
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
6207
            }
6208
            break;
6209
        case 5: /* lfence */
6210
        case 6: /* mfence */
6211
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6212
                goto illegal_op;
6213
            break;
6214
        case 7: /* sfence / clflush */
6215
            if ((modrm & 0xc7) == 0xc0) {
6216
                /* sfence */
6217
                if (!(s->cpuid_features & CPUID_SSE))
6218
                    goto illegal_op;
6219
            } else {
6220
                /* clflush */
6221
                if (!(s->cpuid_features & CPUID_CLFLUSH))
6222
                    goto illegal_op;
6223
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6224
            }
6225
            break;
6226
        default:
6227
            goto illegal_op;
6228
        }
6229
        break;
6230
    case 0x10d: /* prefetch */
6231
        modrm = ldub_code(s->pc++);
6232
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6233
        /* ignore for now */
6234
        break;
6235
    case 0x1aa: /* rsm */
6236
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
6237
            break;
6238
        if (!(s->flags & HF_SMM_MASK))
6239
            goto illegal_op;
6240
        if (s->cc_op != CC_OP_DYNAMIC) {
6241
            gen_op_set_cc_op(s->cc_op);
6242
            s->cc_op = CC_OP_DYNAMIC;
6243
        }
6244
        gen_jmp_im(s->pc - s->cs_base);
6245
        gen_op_rsm();
6246
        gen_eob(s);
6247
        break;
6248
    case 0x110 ... 0x117:
6249
    case 0x128 ... 0x12f:
6250
    case 0x150 ... 0x177:
6251
    case 0x17c ... 0x17f:
6252
    case 0x1c2:
6253
    case 0x1c4 ... 0x1c6:
6254
    case 0x1d0 ... 0x1fe:
6255
        gen_sse(s, b, pc_start, rex_r);
6256
        break;
6257
    default:
6258
        goto illegal_op;
6259
    }
6260
    /* lock generation */
6261
    if (s->prefix & PREFIX_LOCK)
6262
        gen_op_unlock();
6263
    return s->pc;
6264
 illegal_op:
6265
    if (s->prefix & PREFIX_LOCK)
6266
        gen_op_unlock();
6267
    /* XXX: ensure that no lock was generated */
6268
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6269
    return s->pc;
6270
}

/* Aggregate masks over the x86 condition-code bits: OSZAPC is all six
   arithmetic flags; OSZAP is the same set without the carry flag. */
#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)

/* flags read by an operation */
/* Indexed by micro-op index: the mask of condition-code bits
   (CC_O/CC_S/CC_Z/CC_A/CC_P/CC_C) each micro-op consumes.  Unlisted
   entries default to 0 (reads no flags).  Consumed by optimize_flags()
   when computing flag liveness backwards through a translated block. */
static uint16_t opc_read_flags[NB_OPS] = {
    [INDEX_op_aas] = CC_A,
    [INDEX_op_aaa] = CC_A,
    [INDEX_op_das] = CC_A | CC_C,
    [INDEX_op_daa] = CC_A | CC_C,

    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_C,

    [INDEX_op_into] = CC_O,

    /* conditional jumps evaluated from the result of a subtract */
    [INDEX_op_jb_subb] = CC_C,
    [INDEX_op_jb_subw] = CC_C,
    [INDEX_op_jb_subl] = CC_C,

    [INDEX_op_jz_subb] = CC_Z,
    [INDEX_op_jz_subw] = CC_Z,
    [INDEX_op_jz_subl] = CC_Z,

    [INDEX_op_jbe_subb] = CC_Z | CC_C,
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
    [INDEX_op_jbe_subl] = CC_Z | CC_C,

    [INDEX_op_js_subb] = CC_S,
    [INDEX_op_js_subw] = CC_S,
    [INDEX_op_js_subl] = CC_S,

    [INDEX_op_jl_subb] = CC_O | CC_S,
    [INDEX_op_jl_subw] = CC_O | CC_S,
    [INDEX_op_jl_subl] = CC_O | CC_S,

    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzw] = CC_Z,
    [INDEX_op_loopnzl] = CC_Z,
    [INDEX_op_loopzw] = CC_Z,
    [INDEX_op_loopzl] = CC_Z,

    /* setcc from the dynamically-computed cc state */
    [INDEX_op_seto_T0_cc] = CC_O,
    [INDEX_op_setb_T0_cc] = CC_C,
    [INDEX_op_setz_T0_cc] = CC_Z,
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
    [INDEX_op_sets_T0_cc] = CC_S,
    [INDEX_op_setp_T0_cc] = CC_P,
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

    [INDEX_op_setb_T0_subb] = CC_C,
    [INDEX_op_setb_T0_subw] = CC_C,
    [INDEX_op_setb_T0_subl] = CC_C,

    [INDEX_op_setz_T0_subb] = CC_Z,
    [INDEX_op_setz_T0_subw] = CC_Z,
    [INDEX_op_setz_T0_subl] = CC_Z,

    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

    [INDEX_op_sets_T0_subb] = CC_S,
    [INDEX_op_sets_T0_subw] = CC_S,
    [INDEX_op_sets_T0_subl] = CC_S,

    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,

    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
    [INDEX_op_cmc] = CC_C,
    [INDEX_op_salc] = CC_C,

    /* needed for correct flag optimisation before string ops */
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
    [INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
    [INDEX_op_jb_subq] = CC_C,
    [INDEX_op_jz_subq] = CC_Z,
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
    [INDEX_op_js_subq] = CC_S,
    [INDEX_op_jl_subq] = CC_O | CC_S,
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzq] = CC_Z,
    [INDEX_op_loopzq] = CC_Z,

    [INDEX_op_setb_T0_subq] = CC_C,
    [INDEX_op_setz_T0_subq] = CC_Z,
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
    [INDEX_op_sets_T0_subq] = CC_S,
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
#endif

/* adc/sbb and rcl/rcr variants read the incoming carry; the macro is
   instantiated once per address-space access suffix below. */
#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)

    DEF_READF( )
    DEF_READF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_READF(_kernel)
    DEF_READF(_user)
#endif
};

/* flags written by an operation */
/* Indexed by micro-op index: the mask of condition-code bits each
   micro-op produces.  optimize_flags() uses this to kill flag
   computation for ops whose output flags are never read. */
static uint16_t opc_write_flags[NB_OPS] = {
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    /* sse */
    [INDEX_op_ucomiss] = CC_OSZAPC,
    [INDEX_op_ucomisd] = CC_OSZAPC,
    [INDEX_op_comiss] = CC_OSZAPC,
    [INDEX_op_comisd] = CC_OSZAPC,

    /* bcd */
    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    /* byte-sized eflags load cannot touch O (it is outside the low 8 bits) */
    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    /* bit test/modify ops */
    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    /* bit scan */
    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_verr] = CC_Z,
    [INDEX_op_verw] = CC_Z,
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,

/* flag-writing ops that exist in one copy per address-space access
   suffix; instantiated for each suffix below. */
#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)


    DEF_WRITEF( )
    DEF_WRITEF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_WRITEF(_kernel)
    DEF_WRITEF(_user)
#endif
};

/* simpler form of an operation if no flags need to be generated */
/* Indexed by micro-op index: the flag-less replacement op used by
   optimize_flags() when none of the flags an op writes are live.
   Zero entries are replaced with the identity mapping (op -> op) by
   optimize_flags_init(). */
static uint16_t opc_simpler[NB_OPS] = {
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
#if 0
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
#endif

    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)

/* rotate ops exist in one copy per address-space access suffix;
   instantiated for each suffix below. */
#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};

/* TCG macro expansion callback, registered via tcg_set_macro_func().
   Only the MACRO_TEST id (compiled in when MACRO_TEST is defined) is
   expanded; every other macro_id is ignored.  's' and 'dead_args' are
   unused by the current expansions. */
static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
{
#ifdef MACRO_TEST
    if (macro_id == MACRO_TEST) {
        tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
        return;
    }
#endif
    /* unrecognized macro ids: nothing to emit */
}

/* One-time translator initialization: complete the opc_simpler table,
   register the TCG macro expander and create the global TCG values
   (env pointer plus the T0/T1/A0 temporaries). */
void optimize_flags_init(void)
{
    int i;
    /* put default values in arrays */
    /* an opc_simpler entry of 0 means "no simpler form": map it to itself */
    for(i = 0; i < NB_OPS; i++) {
        if (opc_simpler[i] == 0)
            opc_simpler[i] = i;
    }

    tcg_set_macro_func(&tcg_ctx, tcg_macro_func);

    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
#if TARGET_LONG_BITS > HOST_LONG_BITS
    /* target word wider than host word: back the temporaries with
       CPUState memory slots instead of host registers */
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL, 
                                  TCG_AREG0, offsetof(CPUState, t0), "T0");
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
                                  TCG_AREG0, offsetof(CPUState, t1), "T1");
    cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
                                TCG_AREG0, offsetof(CPUState, t2), "A0");
#else
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
    cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
#endif
    /* the helpers are only registered to print debug info */
    TCG_HELPER(helper_divl_EAX_T0);
    TCG_HELPER(helper_idivl_EAX_T0);
}

/* CPU flags computation optimization: we move backward thru the
6644
   generated code to see which flags are needed. The operation is
6645
   modified if suitable */
6646
static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6647
{
6648
    uint16_t *opc_ptr;
6649
    int live_flags, write_flags, op;
6650

    
6651
    opc_ptr = opc_buf + opc_buf_len;
6652
    /* live_flags contains the flags needed by the next instructions
6653
       in the code. At the end of the block, we consider that all the
6654
       flags are live. */
6655
    live_flags = CC_OSZAPC;
6656
    while (opc_ptr > opc_buf) {
6657
        op = *--opc_ptr;
6658
        /* if none of the flags written by the instruction is used,
6659
           then we can try to find a simpler instruction */
6660
        write_flags = opc_write_flags[op];
6661
        if ((live_flags & write_flags) == 0) {
6662
            *opc_ptr = opc_simpler[op];
6663
        }
6664
        /* compute the live flags before the instruction */
6665
        live_flags &= ~write_flags;
6666
        live_flags |= opc_read_flags[op];
6667
    }
6668
}
6669

    
6670
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int j, lj, cflags;
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* unpack the TB's hidden-flags word into the disassembly context */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    /* 0 = direct access; with a soft MMU the index picks the user
       (cpl 3) or kernel variants of the load/store ops */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    /* direct TB chaining is disabled when single-stepping or when
       irqs are inhibited (and without soft MMU support) */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;   /* last opc index for which PC info was recorded */

    /* main translation loop: one guest instruction per iteration */
    for(;;) {
        /* emit a debug trap if a breakpoint sits at this PC */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* record guest PC and cc_op for each emitted micro-op,
               zero-filling any gap since the previous instruction */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           change to be happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP_OPT) {
            fprintf(logfile, "OP before opt:\n");
            tcg_dump_ops(&tcg_ctx, logfile);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

    /* tb->size is only meaningful for a fresh translation, not a
       PC-search retranslation */
    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}

/* Translate basic block 'tb' to intermediate code without recording
   per-instruction PC information. */
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    const int search_pc = 0;

    return gen_intermediate_code_internal(env, tb, search_pc);
}

/* Translate basic block 'tb', additionally recording PC information
   for each intermediate instruction (search_pc mode). */
int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    const int search_pc = 1;

    return gen_intermediate_code_internal(env, tb, search_pc);
}