Statistics
| Branch: | Revision:

root / target-i386 / translate.c @ b5b38f61

History | View | Annotate | Download (221 kB)

1
/*
2
 *  i386 translation
3
 *
4
 *  Copyright (c) 2003 Fabrice Bellard
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, write to the Free Software
18
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
 */
20
#include <stdarg.h>
21
#include <stdlib.h>
22
#include <stdio.h>
23
#include <string.h>
24
#include <inttypes.h>
25
#include <signal.h>
26
#include <assert.h>
27

    
28
#include "cpu.h"
29
#include "exec-all.h"
30
#include "disas.h"
31
#include "helper.h"
32
#include "tcg-op.h"
33

    
34
/* Instruction-prefix bits accumulated into DisasContext.prefix while
   decoding. */
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10

/* Compile-away helpers so shared decoder code can mention 64-bit-only
   state without #ifdefs at every use site: on 32-bit targets the table
   entries become NULL and the state queries constant 0. */
#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif
57

    
58
//#define MACRO_TEST   1

/* global register indexes */
static TCGv cpu_env, cpu_T[2], cpu_A0;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1, cpu_tmp2, cpu_ptr0, cpu_ptr1;

#ifdef TARGET_X86_64
/* When non-zero, byte operands 4-7 select the low byte of
   ESP/EBP/ESI/EDI instead of AH/CH/DH/BH (see gen_op_mov_reg_TN /
   gen_op_mov_TN_reg). */
static int x86_64_hregs;
#endif
68

    
69
/* Decoder state: per-instruction fields plus the invariant properties of
   the translation block currently being generated. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;   /* PREFIX_* bits seen on the current instruction */
    int aflag, dflag; /* effective address / operand size of this insn */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* REX prefix index/base extension bits */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;    /* current privilege level */
    int iopl;   /* I/O privilege level */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;     /* cached CPUID feature bits for the vCPU */
    int cpuid_ext_features;
    int cpuid_ext2_features;
} DisasContext;
105

    
106
/* Forward declarations for control-flow emitters defined later in this
   file. */
static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
109

    
110
/* i386 arith/logic operations */
111
enum {
112
    OP_ADDL,
113
    OP_ORL,
114
    OP_ADCL,
115
    OP_SBBL,
116
    OP_ANDL,
117
    OP_SUBL,
118
    OP_XORL,
119
    OP_CMPL,
120
};
121

    
122
/* i386 shift ops */
123
enum {
124
    OP_ROL,
125
    OP_ROR,
126
    OP_RCL,
127
    OP_RCR,
128
    OP_SHL,
129
    OP_SHR,
130
    OP_SHL1, /* undocumented */
131
    OP_SAR = 7,
132
};
133

    
134
/* operand size */
135
enum {
136
    OT_BYTE = 0,
137
    OT_WORD,
138
    OT_LONG,
139
    OT_QUAD,
140
};
141

    
142
enum {
    /* I386 int registers; values match the hardware register encoding */
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
157

    
158
/* Trivial wrappers loading immediates into the temporaries T0/T1 and the
   address temporary A0.  The ..._im variants take signed 32-bit
   immediates, ..._imu unsigned 32-bit ones, and gen_movtl_* a full
   target_ulong. */

/* T0 = 0 */
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

/* T0 = val (sign-extended to target width) */
static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

/* T0 = val (unsigned 32-bit immediate) */
static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

/* T1 = val (sign-extended) */
static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

/* T1 = val (unsigned 32-bit immediate) */
static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

/* A0 = val (32-bit address immediate) */
static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
/* A0 = val (full 64-bit address immediate) */
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

/* T0 = val (full target_ulong immediate) */
static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

/* T1 = val (full target_ulong immediate) */
static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

/* T0 &= 0xffff (truncate to 16-bit operand) */
static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

/* T0 &= val */
static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

/* T0 = T1 */
static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

/* A0 &= 0xffff (16-bit addressing wrap) */
static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
224

    
225
/* NB_OP_SIZES is the number of operand sizes handled by the dispatch
   tables (byte/word/long, plus quad on x86-64).  DEF_REGS expands to one
   table entry per architectural integer register: 16 on x86-64, 8
   otherwise. */
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,\
  prefix ## R8 ## suffix,\
  prefix ## R9 ## suffix,\
  prefix ## R10 ## suffix,\
  prefix ## R11 ## suffix,\
  prefix ## R12 ## suffix,\
  prefix ## R13 ## suffix,\
  prefix ## R14 ## suffix,\
  prefix ## R15 ## suffix,

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,

#endif /* !TARGET_X86_64 */
262

    
263
/* Byte offsets of the sub-register views (low byte, high byte, word,
   low/high 32-bit halves) inside a host target_ulong register slot.
   On big-endian hosts the narrow parts live at the end of the word. */
#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
276

    
277
/* Store temporary cpu_T[t_index] into guest register 'reg' with operand
   size 'ot'.  Byte stores must distinguish the low-byte registers from
   the legacy high-byte registers AH/CH/DH/BH (encodings 4-7 without
   REX); on x86-64, 32-bit stores also zero the upper half, matching
   hardware zero-extension. */
static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            /* low byte of the register */
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            /* encodings 4-7 without REX: high byte of regs 0-3 */
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

/* Store T0 into guest register 'reg'. */
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 0, reg);
}

/* Store T1 into guest register 'reg'. */
static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 1, reg);
}

/* Store the address temporary A0 into guest register 'reg';
   size: 0 = 16 bit, 1 = 32 bit (zero-extended on x86-64), 2 = 64 bit. */
static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

/* Load guest register 'reg' into cpu_T[t_index].  Except for the
   high-byte case, this loads the full register word; callers are
   expected to ignore bits beyond the operand size. */
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            /* AH/CH/DH/BH: zero-extended load of the high byte */
            tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}

/* A0 = low 32 bits of guest register 'reg' (zero-extended). */
static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}
367

    
368
/* A0 += val with 32-bit wrap-around (the mask is only needed when
   target_ulong is 64 bits wide). */
static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
/* A0 += val, full 64-bit arithmetic (no wrap). */
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

/* A0 += val, choosing 64-bit or 32-bit-wrapping arithmetic from the
   current code size. */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}

/* T0 += T1 */
static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

/* env->eip = T0 */
static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}

/* SP += val, touching only the low 16 bits of ESP (16-bit stack). */
static inline void gen_op_addw_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]) + REG_W_OFFSET);
}

/* ESP += val with 32-bit wrap-around. */
static inline void gen_op_addl_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}

#ifdef TARGET_X86_64
/* RSP += val, full 64-bit arithmetic. */
static inline void gen_op_addq_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}
#endif

/* env->cc_op = val: record which lazy condition-code computation is
   pending. */
static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_tl(cpu_tmp0, val);
    tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, cc_op));
}

/* A0 += reg << shift, result wrapped to 32 bits (scaled-index address
   component). */
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0) 
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
445

    
446
/* A0 = low 32 bits of the base of segment 'reg'. */
static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

/* A0 += base of segment 'reg', wrapped to 32 bits. */
static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
/* 64-bit variants of the address helpers above: no 32-bit wrap. */

/* A0 = full base of segment 'reg'. */
static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

/* A0 += full base of segment 'reg'. */
static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

/* A0 = full 64-bit value of guest register 'reg'. */
static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

/* A0 += reg << shift, full 64-bit arithmetic. */
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0) 
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif
485

    
486
/* CMOV dispatch table indexed by [operand size - OT_WORD][register].
   Byte-sized CMOV does not exist, hence NB_OP_SIZES - 1 rows. */
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_cmovw_, _T1_T0)
    },
    [1] = {
        DEF_REGS(gen_op_cmovl_, _T1_T0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_cmovq_, _T1_T0)
    },
#endif
};
499

    
500
/* Carry-using ALU ops (ADC/SBB) per operand size.  SUFFIX selects the
   memory-access variant: empty for register ops, _raw/_kernel/_user for
   the three MMU access modes.  Quad entries are NULL on 32-bit targets. */
#define DEF_ARITHC(SUFFIX)\
    {\
        gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
    },

/* Register-destination ADC/SBB: [operand size][0 = adc, 1 = sbb]. */
static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
    DEF_ARITHC( )
};

/* Memory-destination ADC/SBB: 4 sizes x 3 access modes. */
static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
    DEF_ARITHC(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_ARITHC(_kernel)
    DEF_ARITHC(_user)
#endif
};

/* Map the OP_* ALU op (group-1 encoding order) to the lazy cc_op class
   it produces for byte operands; wider sizes are derived by offset. */
static const int cc_op_arithb[8] = {
    CC_OP_ADDB,
    CC_OP_LOGICB,
    CC_OP_ADDB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
};

/* CMPXCHG per operand size; SUFFIX as for DEF_ARITHC. */
#define DEF_CMPXCHG(SUFFIX)\
    gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
    X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),

/* Register-destination CMPXCHG, indexed by operand size. */
static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
    DEF_CMPXCHG( )
};

/* Memory-destination CMPXCHG: 4 sizes x 3 access modes. */
static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
    DEF_CMPXCHG(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_CMPXCHG(_kernel)
    DEF_CMPXCHG(_user)
#endif
};
558

    
559
/* Shift/rotate ops per operand size, in group-2 /reg encoding order.
   Slot 6 duplicates SHL (the undocumented alias, see OP_SHL1).  SUFFIX
   selects register vs. _raw/_kernel/_user memory variants. */
#define DEF_SHIFT(SUFFIX)\
    {\
        gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_roll ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
    },

/* Register-destination shifts: [operand size][OP_ROL..OP_SAR]. */
static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
    DEF_SHIFT( )
};

/* Memory-destination shifts: 4 sizes x 3 access modes. */
static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
    DEF_SHIFT(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFT(_kernel)
    DEF_SHIFT(_user)
#endif
};

/* Double-precision shifts (SHLD/SHRD); 'op' selects the count source
   (immediate or ECX).  There is no byte form, hence the NULL row. */
#define DEF_SHIFTD(SUFFIX, op)\
    {\
        NULL,\
        NULL,\
    },\
    {\
        gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
        gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
     },\
    {\
        gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
        gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    },\
    {\
X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
           gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
    },

/* SHLD/SHRD with immediate count, register destination. */
static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
    DEF_SHIFTD(, im)
};

/* SHLD/SHRD with CL count, register destination. */
static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
    DEF_SHIFTD(, ECX)
};

/* SHLD/SHRD with immediate count, memory destination (3 access modes). */
static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, im)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, im)
    DEF_SHIFTD(_user, im)
#endif
};

/* SHLD/SHRD with CL count, memory destination (3 access modes). */
static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, ECX)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, ECX)
    DEF_SHIFTD(_user, ECX)
#endif
};
654

    
655
/* Bit-test ops (BT/BTS/BTR/BTC): [size word/long/quad][op]. */
static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
    [0] = {
        gen_op_btw_T0_T1_cc,
        gen_op_btsw_T0_T1_cc,
        gen_op_btrw_T0_T1_cc,
        gen_op_btcw_T0_T1_cc,
    },
    [1] = {
        gen_op_btl_T0_T1_cc,
        gen_op_btsl_T0_T1_cc,
        gen_op_btrl_T0_T1_cc,
        gen_op_btcl_T0_T1_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_btq_T0_T1_cc,
        gen_op_btsq_T0_T1_cc,
        gen_op_btrq_T0_T1_cc,
        gen_op_btcq_T0_T1_cc,
    },
#endif
};

/* Adjust A0 by the byte offset implied by bit index T1, per size. */
static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
    gen_op_add_bitw_A0_T1,
    gen_op_add_bitl_A0_T1,
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
};

/* Bit-scan ops: [size word/long/quad][0 = BSF, 1 = BSR]. */
static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
    [0] = {
        gen_op_bsfw_T0_cc,
        gen_op_bsrw_T0_cc,
    },
    [1] = {
        gen_op_bsfl_T0_cc,
        gen_op_bsrl_T0_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_bsfq_T0_cc,
        gen_op_bsrq_T0_cc,
    },
#endif
};
700

    
701
static inline void gen_op_lds_T0_A0(int idx)
702
{
703
    int mem_index = (idx >> 2) - 1;
704
    switch(idx & 3) {
705
    case 0:
706
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
707
        break;
708
    case 1:
709
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
710
        break;
711
    default:
712
    case 2:
713
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
714
        break;
715
    }
716
}
717

    
718
/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
/* Zero-extending load of T0 from the address in A0.  idx & 3 encodes
   the size (0 = byte, 1 = word, 2 = long, 3 = quad); the remaining
   bits select the memory index. */
static inline void gen_op_ld_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

/* Explicitly-unsigned alias of gen_op_ld_T0_A0. */
static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_T0_A0(idx);
}

/* Zero-extending load of T1 from the address in A0; idx as above. */
static inline void gen_op_ld_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}

/* Store T0 to the address in A0; idx as above. */
static inline void gen_op_st_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
783

    
784
static inline void gen_op_st_T1_A0(int idx)
785
{
786
    int mem_index = (idx >> 2) - 1;
787
    switch(idx & 3) {
788
    case 0:
789
        tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
790
        break;
791
    case 1:
792
        tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
793
        break;
794
    case 2:
795
        tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
796
        break;
797
    default:
798
    case 3:
799
        tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
800
        break;
801
    }
802
}
803

    
804
/* env->eip = pc: record the current instruction pointer before an op
   that may raise an exception or leave the translation block. */
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
809

    
810
/* A0 = segment-adjusted source address for string ops (xSI).  Honors a
   segment override prefix; defaults to DS when segmentation matters. */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64 bit address: segment base only with an explicit override */
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}

/* A0 = destination address for string ops (ES:xDI).  The destination
   segment is architecturally always ES and cannot be overridden. */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
865

    
866
/* T0 = +/-element_size depending on the direction flag, per operand
   size; used to step xSI/xDI in string instructions. */
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
    gen_op_movl_T0_Dshiftb,
    gen_op_movl_T0_Dshiftw,
    gen_op_movl_T0_Dshiftl,
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
};

/* Conditional branches on the count register, indexed by address size
   (CX / ECX / RCX). */
static GenOpFunc1 *gen_op_jnz_ecx[3] = {
    gen_op_jnz_ecxw,
    gen_op_jnz_ecxl,
    X86_64_ONLY(gen_op_jnz_ecxq),
};

static GenOpFunc1 *gen_op_jz_ecx[3] = {
    gen_op_jz_ecxw,
    gen_op_jz_ecxl,
    X86_64_ONLY(gen_op_jz_ecxq),
};

/* Decrement the count register, indexed by address size. */
static GenOpFunc *gen_op_dec_ECX[3] = {
    gen_op_decw_ECX,
    gen_op_decl_ECX,
    X86_64_ONLY(gen_op_decq_ECX),
};

/* REPZ/REPNZ termination tests on the last compare:
   [0 = continue-while-nonzero, 1 = continue-while-zero][operand size]. */
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};

/* Port I/O with the port number in DX, indexed by operand size. */
static GenOpFunc *gen_op_in_DX_T0[3] = {
    gen_op_inb_DX_T0,
    gen_op_inw_DX_T0,
    gen_op_inl_DX_T0,
};

static GenOpFunc *gen_op_out_DX_T0[3] = {
    gen_op_outb_DX_T0,
    gen_op_outw_DX_T0,
    gen_op_outl_DX_T0,
};

/* Port I/O with the port number in T0 (immediate-port forms). */
static GenOpFunc *gen_op_in[3] = {
    gen_op_inb_T0_T1,
    gen_op_inw_T0_T1,
    gen_op_inl_T0_T1,
};

static GenOpFunc *gen_op_out[3] = {
    gen_op_outb_T0_T1,
    gen_op_outw_T0_T1,
    gen_op_outl_T0_T1,
};

/* I/O permission bitmap checks, port in T0 or DX respectively. */
static GenOpFunc *gen_check_io_T0[3] = {
    gen_op_check_iob_T0,
    gen_op_check_iow_T0,
    gen_op_check_iol_T0,
};

static GenOpFunc *gen_check_io_DX[3] = {
    gen_op_check_iob_DX,
    gen_op_check_iow_DX,
    gen_op_check_iol_DX,
};
941

    
942
/* Emit an I/O permission-bitmap check for an IN/OUT of size 'ot' at
   guest eip 'cur_eip'.  The port number is taken from DX when use_dx is
   non-zero, from T0 otherwise.  No check is needed unless we are in
   protected mode with CPL > IOPL, or in vm86 mode. */
static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
{
    int need_check = s->pe && (s->cpl > s->iopl || s->vm86);

    if (!need_check)
        return;

    /* Flush the lazily-tracked condition codes and the eip so a
       potential #GP from the check reports a consistent state. */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    (use_dx ? gen_check_io_DX : gen_check_io_T0)[ot]();
}
954

    
955
/* Emit one MOVS iteration: copy an element from DS:xSI (or override) to
   ES:xDI, then step both index registers by the direction-flag delta. */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    /* T0 = +/-element size per DF, then advance xSI/xDI */
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
976

    
977
/* Flush the statically-known cc_op into env and switch the translator
   to dynamic condition-code tracking.  No-op if already dynamic. */
static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op == CC_OP_DYNAMIC)
        return;
    gen_op_set_cc_op(s->cc_op);
    s->cc_op = CC_OP_DYNAMIC;
}
984

    
985
/* XXX: does not work with gdbstub "ice" single step - not a
986
   serious problem */
987
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
988
{
989
    int l1, l2;
990

    
991
    l1 = gen_new_label();
992
    l2 = gen_new_label();
993
    gen_op_jnz_ecx[s->aflag](l1);
994
    gen_set_label(l2);
995
    gen_jmp_tb(s, next_eip, 1);
996
    gen_set_label(l1);
997
    return l2;
998
}
999

    
1000
/* Emit one STOS iteration: store AL/AX/EAX/RAX to ES:xDI, then step
   xDI by the direction-flag delta. */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}

/* Emit one LODS iteration: load an element from xSI into the
   accumulator, then step xSI. */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}

/* Emit one SCAS iteration: compare the accumulator with the element at
   ES:xDI (sets the lazy flags via cmpl), then step xDI. */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}

/* Emit one CMPS iteration: compare [xSI] with [ES:xDI] (sets the lazy
   flags), then step both index registers. */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1077

    
1078
/* Emit one INS iteration: read a port (DX) into [ES:xDI], then step
   xDI. */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    /* NOTE(review): a zero is stored before the port read — presumably
       so that any write page fault is taken before the I/O side effect
       happens; confirm against the original commit history. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_in_DX_T0[ot]();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}

/* Emit one OUTS iteration: write the element at xSI to the port in DX,
   then step xSI. */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_out_DX_T0[ot]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1115

    
1116
/* same method as Valgrind : we generate jumps to current or next
   instruction */
/* GEN_REPZ(op) expands to gen_repz_<op>(), which wraps a single string
   op in a REP loop: skip to next_eip when ECX is already zero, run one
   iteration, decrement ECX, and jump back to cur_eip for the next pass
   (each pass is a fresh TB entry, so single-stepping works). */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
1133

    
1134
/* GEN_REPZ2(op) is the GEN_REPZ variant for ops that also terminate on
   the ZF condition (SCAS/CMPS): 'nz' selects REPZ vs REPNZ, and the
   extra gen_op_string_jnz_sub jump exits the loop when the comparison
   result says to stop. */
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);\
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
1151

    
1152
/* Instantiate the REP wrappers: plain repeats for MOVS/STOS/LODS/INS/OUTS,
   and ZF-terminating repeats (REPZ/REPNZ) for SCAS/CMPS. */
GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
1159

    
1160
/* Condition codes in x86 encoding order (bit 0 of the opcode selects the
   inverted form, handled separately by the callers). */
enum {
    JCC_O,      /* overflow */
    JCC_B,      /* below (carry) */
    JCC_Z,      /* zero */
    JCC_BE,     /* below or equal */
    JCC_S,      /* sign */
    JCC_P,      /* parity */
    JCC_L,      /* less (signed) */
    JCC_LE,     /* less or equal (signed) */
};
1170

    
1171
/* Fast conditional-jump ops usable when the flags state is a pending
   subtraction (CC_OP_SUB*), indexed by [operand size][jcc condition].
   NULL entries (O, P) have no fast path and fall back to the slow
   flag computation. */
static GenOpFunc1 *gen_jcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_jb_subb,
        gen_op_jz_subb,
        gen_op_jbe_subb,
        gen_op_js_subb,
        NULL,
        gen_op_jl_subb,
        gen_op_jle_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_jb_subw,
        gen_op_jz_subw,
        gen_op_jbe_subw,
        gen_op_js_subw,
        NULL,
        gen_op_jl_subw,
        gen_op_jle_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_jb_subl,
        gen_op_jz_subl,
        gen_op_jbe_subl,
        gen_op_js_subl,
        NULL,
        gen_op_jl_subl,
        gen_op_jle_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        /* BUGGY_64 entries expand to NULL: these ops are unusable on
           64-bit (see the BUGGY_64 definition at the top of the file) */
        BUGGY_64(gen_op_jb_subq),
        gen_op_jz_subq,
        BUGGY_64(gen_op_jbe_subq),
        gen_op_js_subq,
        NULL,
        BUGGY_64(gen_op_jl_subq),
        BUGGY_64(gen_op_jle_subq),
    },
#endif
};
1215
/* LOOPNZ/LOOPZ/JECXZ-style ops indexed by [address size (0=16,1=32,2=64)]
   [variant]; index 3 of the inner dimension is intentionally unused. */
static GenOpFunc1 *gen_op_loop[3][4] = {
    [0] = {
        gen_op_loopnzw,
        gen_op_loopzw,
        gen_op_jnz_ecxw,
    },
    [1] = {
        gen_op_loopnzl,
        gen_op_loopzl,
        gen_op_jnz_ecxl,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_loopnzq,
        gen_op_loopzq,
        gen_op_jnz_ecxq,
    },
#endif
};
1234

    
1235
/* Slow-path setcc ops, one per condition: compute T0 = 0/1 from the
   full (materialized) EFLAGS state.  Always valid, used when no fast
   path matches the current cc_op. */
static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};
1245

    
1246
/* Fast setcc ops usable when the flags state is a pending subtraction
   (CC_OP_SUB*), indexed by [operand size][jcc condition]; NULL entries
   (O, P) fall back to gen_setcc_slow. */
static GenOpFunc *gen_setcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        NULL,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        NULL,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        NULL,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        NULL,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,
    },
#endif
};
1290

    
1291
/* x87 two-operand helpers (ST0 op= FT0), indexed by the 3-bit FPU
   opcode field: ADD, MUL, COM, COMP (same helper, the pop is emitted by
   the caller), SUB, SUBR, DIV, DIVR. */
static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,
};
1301

    
1302
/* NOTE the exception in "r" op ordering */
/* x87 helpers for ST(i) op= ST0; the SUB/SUBR and DIV/DIVR pairs are
   swapped relative to the ST0/FT0 table above (ISA-defined reversal for
   the two-operand register forms).  NULL entries are the COM slots,
   which have no STN form. */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    NULL,
    NULL,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
};
1313

    
1314
/* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit one ALU operation 'op' of operand size 'ot'.  The destination is
   register 'd' (or memory at A0 when d == OR_TMP0); the second operand
   must already be in T1.  Updates s1->cc_op so EFLAGS can be rebuilt
   lazily from the last result. */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    GenOpFunc *gen_update_cc;

    /* load the destination operand into T0 */
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
    case OP_SBBL:
        /* ADC/SBB consume the current carry, so the pending flags must
           be materialized first; afterwards the flags state can only be
           described dynamically */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        if (d != OR_TMP0) {
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
            gen_op_mov_reg_T0(ot, d);
        } else {
            /* combined read-modify-write op for the memory destination */
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
        }
        s1->cc_op = CC_OP_DYNAMIC;
        goto the_end;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        s1->cc_op = CC_OP_ADDB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_CMPL:
        /* CMP computes flags only; no writeback, no extra cc update */
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = NULL;
        break;
    }
    if (op != OP_CMPL) {
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
    }
    /* the flags update must happen after the memory write (precise
       exception support) */
    if (gen_update_cc)
        gen_update_cc();
 the_end: ;
}
1381

    
1382
/* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit INC (c > 0) or DEC (c <= 0) of operand size 'ot' on destination
   'd'.  INC/DEC leave CF untouched, so the pending flags state is
   materialized first and a dedicated CC_OP_INC*/CC_OP_DEC* state is
   recorded. */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    /* flush flags: CF must survive from the previous operation */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        gen_op_incl_T0();
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        gen_op_decl_T0();
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    /* cc update after the (possibly faulting) memory write */
    gen_op_update_inc_cc();
}
1404

    
1405
/* Emit shift/rotate operation 'op' of operand size 'ot' on destination
   'd' with a count taken from register 's' (or already in T1 when
   s == OR_TMP1).  Since a zero count leaves the flags unchanged, the
   result flags can only be described dynamically. */
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);
    /* for zero counts, flags are not updated, so must do it dynamically */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);

    if (d != OR_TMP0)
        gen_op_shift_T0_T1_cc[ot][op]();
    else
        gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1425

    
1426
/* Emit a shift with an immediate count 'c': load it into T1 and reuse
   the register-count path. */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
1432

    
1433
/* Decode the memory operand of a ModRM byte (SIB byte and displacement
   included, consumed from the instruction stream) and emit code that
   leaves the effective address in A0, segment base applied when needed.
   The outputs *reg_ptr/*offset_ptr are always OR_A0/0 in this version. */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    /* an explicit segment prefix forces the segment-base addition */
    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            /* rm == 4 means a SIB byte follows */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base register, disp32 only */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    /* x86-64 RIP-relative addressing */
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0) {
                    gen_op_addq_A0_im(disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
            /* displacement-only address */
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
            /* A0 += index << scale */
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* EBP/ESP-based addresses default to SS, others to DS */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        /* 16-bit addressing */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* disp16-only address */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        /* the fixed 16-bit base/index register combinations */
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        /* 16-bit wrap-around of the offset */
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* BP-based modes default to SS */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
1616

    
1617
/* Skip over the memory-operand bytes of a ModRM encoding (SIB byte and
   displacement) without generating any code.  Used for instructions that
   decode an operand but ignore it (multi-byte NOP, prefetch hints). */
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod = (modrm >> 6) & 3;
    int rm = modrm & 7;

    if (mod == 3)
        return; /* register operand: nothing follows the ModRM byte */

    if (s->aflag) {
        /* 32/64-bit addressing */
        int base = rm;
        if (base == 4) {
            /* SIB byte present: consume it and take its base field */
            int sib = ldub_code(s->pc++);
            base = sib & 7;
        }
        if (mod == 1) {
            s->pc++;            /* disp8 */
        } else if (mod == 2 || base == 5) {
            s->pc += 4;         /* disp32 (also mod==0 with base==5) */
        }
    } else {
        /* 16-bit addressing */
        if (mod == 1) {
            s->pc++;            /* disp8 */
        } else if (mod == 2 || rm == 6) {
            s->pc += 2;         /* disp16 (also mod==0 with rm==6) */
        }
    }
}
1666

    
1667
/* used for LEA and MOV AX, mem */
1668
static void gen_add_A0_ds_seg(DisasContext *s)
1669
{
1670
    int override, must_add_seg;
1671
    must_add_seg = s->addseg;
1672
    override = R_DS;
1673
    if (s->override >= 0) {
1674
        override = s->override;
1675
        must_add_seg = 1;
1676
    } else {
1677
        override = R_DS;
1678
    }
1679
    if (must_add_seg) {
1680
#ifdef TARGET_X86_64
1681
        if (CODE64(s)) {
1682
            gen_op_addq_A0_seg(override);
1683
        } else
1684
#endif
1685
        {
1686
            gen_op_addl_A0_seg(override);
1687
        }
1688
    }
1689
}
1690

    
1691
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1692
   OR_TMP0 */
1693
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1694
{
1695
    int mod, rm, opreg, disp;
1696

    
1697
    mod = (modrm >> 6) & 3;
1698
    rm = (modrm & 7) | REX_B(s);
1699
    if (mod == 3) {
1700
        if (is_store) {
1701
            if (reg != OR_TMP0)
1702
                gen_op_mov_TN_reg(ot, 0, reg);
1703
            gen_op_mov_reg_T0(ot, rm);
1704
        } else {
1705
            gen_op_mov_TN_reg(ot, 0, rm);
1706
            if (reg != OR_TMP0)
1707
                gen_op_mov_reg_T0(ot, reg);
1708
        }
1709
    } else {
1710
        gen_lea_modrm(s, modrm, &opreg, &disp);
1711
        if (is_store) {
1712
            if (reg != OR_TMP0)
1713
                gen_op_mov_TN_reg(ot, 0, reg);
1714
            gen_op_st_T0_A0(ot + s->mem_index);
1715
        } else {
1716
            gen_op_ld_T0_A0(ot + s->mem_index);
1717
            if (reg != OR_TMP0)
1718
                gen_op_mov_reg_T0(ot, reg);
1719
        }
1720
    }
1721
}
1722

    
1723
/* Fetch an immediate of size 'ot' from the instruction stream and
   advance s->pc past it.  Sizes other than byte/word read 4 bytes. */
static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t val;

    if (ot == OT_BYTE) {
        val = ldub_code(s->pc);
        s->pc++;
    } else if (ot == OT_WORD) {
        val = lduw_code(s->pc);
        s->pc += 2;
    } else {
        /* OT_LONG and any other size */
        val = ldl_code(s->pc);
        s->pc += 4;
    }
    return val;
}
1744

    
1745
static inline int insn_const_size(unsigned int ot)
1746
{
1747
    if (ot <= OT_LONG)
1748
        return 1 << ot;
1749
    else
1750
        return 4;
1751
}
1752

    
1753
/* Emit the end-of-block jump to 'eip'.  When the target lies in one of
   the (up to two) guest pages this TB already covers, a chainable direct
   jump is emitted (goto_tb + exit_tb encoding the TB pointer and jump
   slot 'tb_num'); otherwise fall back to an unchained end-of-block. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
1773

    
1774
/* Emit a conditional jump for condition 'b' (x86 encoding; bit 0 is the
   inverted form): taken target 'val', fall-through 'next_eip'.  When TB
   chaining is allowed (s->jmp_opt) a fast per-cc_op jump op is used if
   available and both edges become chainable goto_tb exits; otherwise the
   condition is computed the slow way and the block ends unchained.
   Fix vs. original: removed the dead local 'tb' (assigned from s->tb but
   never read — gen_goto_tb fetches s->tb itself) and merged the two
   identical JCC_Z/JCC_S case bodies. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            switch(jcc_op) {
            case JCC_Z:
            case JCC_S:
                /* Z and S depend only on the result, so the SUB-state
                   jump ops (indexed by size, (cc_op - CC_OP_ADDB) % 4)
                   are also valid here */
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            /* no fast path: compute T0 = condition and branch on it */
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        if (inv) {
            /* inverted condition: swap the two targets */
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {
        /* slow unchained version: always ends the TB */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
1902

    
1903
/* Emit SETcc for condition 'b' (x86 encoding; bit 0 inverts): leaves
   0 or 1 in T0.  Uses a fast per-cc_op set op when available, otherwise
   materializes the flags and uses the slow path. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
        /* NOTE(review): this group omits the ADC/SBB/SAR states that the
           corresponding switch in gen_jcc handles — confirm whether that
           asymmetry is intended */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        switch(jcc_op) {
        case JCC_Z:
            /* Z depends only on the result: the SUB-state op applies */
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            /* S likewise depends only on the result */
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        /* materialize the flags and use the generic set op */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}
1969

    
1970
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
/* In protected mode the full (checking) segment load op is used and the
   TB is ended whenever the load can change decode-relevant state; in
   real/vm86 mode the selector is written directly into the segment
   cache. */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        /* the helper may fault: EIP must be up to date */
        gen_jmp_im(cur_eip);
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
1992

    
1993
/* Load a 64-bit immediate into T1; alias used by the SVM intercept
   helpers below to pass the exit-info parameter. */
#define SVM_movq_T1_im(x) gen_movtl_T1_im(x)
1994

    
1995
/* Emit an SVM I/O-intercept check for an IN/OUT instruction when the
   guest's IOIO-protection intercept is active.  'type' is the 64-bit
   exit-info value describing the access; pc_start is the address of the
   current instruction.  Always returns 0 (the intercept decision is
   made at run time inside the generated op). */
static inline int
gen_svm_check_io(DisasContext *s, target_ulong pc_start, uint64_t type)
{
#if !defined(CONFIG_USER_ONLY)
    if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
        /* flags and both EIP values must be in the CPU state in case the
           check triggers a #VMEXIT */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        SVM_movq_T1_im(s->pc - s->cs_base);     /* next instruction */
        gen_jmp_im(pc_start - s->cs_base);      /* current instruction */
        gen_op_geneflags();
        gen_op_svm_check_intercept_io((uint32_t)(type >> 32), (uint32_t)type);
        s->cc_op = CC_OP_DYNAMIC;
        /* FIXME: maybe we could move the io intercept vector to the TB as well
                  so we know if this is an EOB or not ... let's assume it's not
                  for now. */
    }
#endif
    return 0;
}
2014

    
2015
static inline int svm_is_rep(int prefixes)
2016
{
2017
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2018
}
2019

    
2020
/* Emit an SVM intercept check for exit code 'type' with exit-info
   parameter 'param'.  Returns 1 when a guaranteed #VMEXIT was generated
   (the TB is ended), 0 when the intercept is only checked at run time
   (or SVM is inactive). */
static inline int
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint64_t type, uint64_t param)
{
    if(!(s->flags & (INTERCEPT_SVM_MASK)))
        /* no SVM activated */
        return 0;
    switch(type) {
        /* CRx and DRx reads/writes */
        case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            SVM_movq_T1_im(param);
            gen_op_geneflags();
            gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
            /* this is a special case as we do not know if the interception occurs
               so we assume there was none */
            return 0;
        case SVM_EXIT_MSR:
            /* MSR accesses are intercepted only when the MSR-protection
               bitmap intercept is enabled */
            if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                    s->cc_op = CC_OP_DYNAMIC;
                }
                gen_jmp_im(pc_start - s->cs_base);
                SVM_movq_T1_im(param);
                gen_op_geneflags();
                gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
                /* this is a special case as we do not know if the interception occurs
                   so we assume there was none */
                return 0;
            }
            break;
        default:
            /* all remaining exit codes: when the matching intercept bit is
               set the #VMEXIT is unconditional, so it can be emitted
               directly and the TB ended here */
            if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                    /* NOTE(review): this path records CC_OP_EFLAGS while the
                       cases above record CC_OP_DYNAMIC — confirm intended */
                    s->cc_op = CC_OP_EFLAGS;
                }
                gen_jmp_im(pc_start - s->cs_base);
                SVM_movq_T1_im(param);
                gen_op_geneflags();
                gen_op_svm_vmexit(type >> 32, type);
                /* we can optimize this one so TBs don't get longer
                   than up to vmexit */
                gen_eob(s);
                return 1;
            }
    }
    return 0;
}
2074

    
2075
/* Convenience wrapper: SVM intercept check with a zero exit-info
   parameter. */
static inline int
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    return gen_svm_check_intercept_param(s, pc_start, type, 0);
}
2080

    
2081
/* Emit "ESP += addend" at the stack-pointer width implied by the
   current mode (64-bit code, 32-bit stack segment, or 16-bit). */
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_addq_ESP_im(addend);
    } else
#endif
    if (s->ss32) {
        gen_op_addl_ESP_im(addend);
    } else {
        gen_op_addw_ESP_im(addend);
    }
}
2094

    
2095
/* generate a push. It depends on ss32, addseg and dflag */
/* Push T0 on the stack: decrement the stack pointer by the operand size
   and store T0 at the new top.  The store happens before ESP is written
   back so a faulting store leaves ESP unmodified. */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            /* 64-bit push */
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            /* 16-bit push (operand-size prefix) */
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                /* keep the unsegmented ESP value in T1 for the final
                   write-back, then add the SS base to A0 */
                gen_op_movl_T1_A0();
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            /* 16-bit stack: wrap the offset, save it, add SS base */
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_seg(R_SS);
        }
        /* size index: dflag 0 -> word, 1 -> long */
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
2134

    
2135
/* generate a push. It depends on ss32, addseg and dflag */
2136
/* slower version for T1, only used for call Ev */
2137
static void gen_push_T1(DisasContext *s)
2138
{
2139
#ifdef TARGET_X86_64
2140
    if (CODE64(s)) {
2141
        gen_op_movq_A0_reg(R_ESP);
2142
        if (s->dflag) {
2143
            gen_op_addq_A0_im(-8);
2144
            gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2145
        } else {
2146
            gen_op_addq_A0_im(-2);
2147
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
2148
        }
2149
        gen_op_mov_reg_A0(2, R_ESP);
2150
    } else
2151
#endif
2152
    {
2153
        gen_op_movl_A0_reg(R_ESP);
2154
        if (!s->dflag)
2155
            gen_op_addl_A0_im(-2);
2156
        else
2157
            gen_op_addl_A0_im(-4);
2158
        if (s->ss32) {
2159
            if (s->addseg) {
2160
                gen_op_addl_A0_seg(R_SS);
2161
            }
2162
        } else {
2163
            gen_op_andl_A0_ffff();
2164
            gen_op_addl_A0_seg(R_SS);
2165
        }
2166
        gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2167

    
2168
        if (s->ss32 && !s->addseg)
2169
            gen_op_mov_reg_A0(1, R_ESP);
2170
        else
2171
            gen_stack_update(s, (-2) << s->dflag);
2172
    }
2173
}
2174

    
2175
/* two step pop is necessary for precise exceptions */
2176
static void gen_pop_T0(DisasContext *s)
2177
{
2178
#ifdef TARGET_X86_64
2179
    if (CODE64(s)) {
2180
        gen_op_movq_A0_reg(R_ESP);
2181
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2182
    } else
2183
#endif
2184
    {
2185
        gen_op_movl_A0_reg(R_ESP);
2186
        if (s->ss32) {
2187
            if (s->addseg)
2188
                gen_op_addl_A0_seg(R_SS);
2189
        } else {
2190
            gen_op_andl_A0_ffff();
2191
            gen_op_addl_A0_seg(R_SS);
2192
        }
2193
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2194
    }
2195
}
2196

    
2197
/* Second half of a pop: advance the stack pointer past the value that
   gen_pop_T0 loaded (8 bytes in 64-bit mode, else 2 << dflag).  */
static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}
2208

    
2209
/* Compute the linear address of the stack top into A0 (and keep the
   unbased offset in T1), honoring ss32 and addseg.  */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
2218

    
2219
/* NOTE: wrap around in 16 bit not fully handled */
2220
static void gen_pusha(DisasContext *s)
2221
{
2222
    int i;
2223
    gen_op_movl_A0_reg(R_ESP);
2224
    gen_op_addl_A0_im(-16 <<  s->dflag);
2225
    if (!s->ss32)
2226
        gen_op_andl_A0_ffff();
2227
    gen_op_movl_T1_A0();
2228
    if (s->addseg)
2229
        gen_op_addl_A0_seg(R_SS);
2230
    for(i = 0;i < 8; i++) {
2231
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2232
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2233
        gen_op_addl_A0_im(2 <<  s->dflag);
2234
    }
2235
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2236
}
2237

    
2238
/* NOTE: wrap around in 16 bit not fully handled */
2239
static void gen_popa(DisasContext *s)
2240
{
2241
    int i;
2242
    gen_op_movl_A0_reg(R_ESP);
2243
    if (!s->ss32)
2244
        gen_op_andl_A0_ffff();
2245
    gen_op_movl_T1_A0();
2246
    gen_op_addl_T1_im(16 <<  s->dflag);
2247
    if (s->addseg)
2248
        gen_op_addl_A0_seg(R_SS);
2249
    for(i = 0;i < 8; i++) {
2250
        /* ESP is not reloaded */
2251
        if (i != 3) {
2252
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2253
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2254
        }
2255
        gen_op_addl_A0_im(2 <<  s->dflag);
2256
    }
2257
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2258
}
2259

    
2260
/* ENTER: push EBP, optionally copy 'level' (0..31) nesting frame
   pointers via a helper, set EBP to the new frame base, and reserve
   'esp_addend' additional bytes of stack.  */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        /* NOTE(review): 32-bit A0/T1 ops are applied to RSP here, which
           would truncate stack addresses above 4GB -- confirm intent.  */
        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_2(helper_enter64_level,
                               tcg_const_i32(level),
                               tcg_const_i32((ot == OT_QUAD)));
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        gen_op_movl_T1_A0();    /* frame base (unbased offset) */
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_2(helper_enter_level,
                               tcg_const_i32(level),
                               tcg_const_i32(s->dflag));
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        /* ESP = frame base - locals - copied frame pointers */
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
2313

    
2314
/* Raise exception 'trapno' at guest EIP 'cur_eip': flush lazy
   condition codes, sync EIP, call the raise helper, and mark the
   translation block as finished (is_jmp = 3).  */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
    s->is_jmp = 3;
}
2322

    
2323
/* an interrupt is different from an exception because of the
2324
   privilege checks */
2325
static void gen_interrupt(DisasContext *s, int intno,
2326
                          target_ulong cur_eip, target_ulong next_eip)
2327
{
2328
    if (s->cc_op != CC_OP_DYNAMIC)
2329
        gen_op_set_cc_op(s->cc_op);
2330
    gen_jmp_im(cur_eip);
2331
    tcg_gen_helper_0_2(helper_raise_interrupt, 
2332
                       tcg_const_i32(intno), 
2333
                       tcg_const_i32(next_eip - cur_eip));
2334
    s->is_jmp = 3;
2335
}
2336

    
2337
/* Stop translation and drop to the debugger at guest EIP 'cur_eip'.  */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_0(helper_debug);
    s->is_jmp = 3;
}
2345

    
2346
/* generate a generic end of block. Trace exception is also generated
2347
   if needed */
2348
static void gen_eob(DisasContext *s)
2349
{
2350
    if (s->cc_op != CC_OP_DYNAMIC)
2351
        gen_op_set_cc_op(s->cc_op);
2352
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
2353
        tcg_gen_helper_0_0(helper_reset_inhibit_irq);
2354
    }
2355
    if (s->singlestep_enabled) {
2356
        tcg_gen_helper_0_0(helper_debug);
2357
    } else if (s->tf) {
2358
        tcg_gen_helper_0_0(helper_single_step);
2359
    } else {
2360
        tcg_gen_exit_tb(0);
2361
    }
2362
    s->is_jmp = 3;
2363
}
2364

    
2365
/* generate a jump to eip. No segment change must happen before as a
2366
   direct call to the next block may occur */
2367
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2368
{
2369
    if (s->jmp_opt) {
2370
        if (s->cc_op != CC_OP_DYNAMIC) {
2371
            gen_op_set_cc_op(s->cc_op);
2372
            s->cc_op = CC_OP_DYNAMIC;
2373
        }
2374
        gen_goto_tb(s, tb_num, eip);
2375
        s->is_jmp = 3;
2376
    } else {
2377
        gen_jmp_im(eip);
2378
        gen_eob(s);
2379
    }
2380
}
2381

    
2382
/* Jump to eip using TB chaining slot 0.  */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2386

    
2387
static inline void gen_ldq_env_A0(int idx, int offset)
2388
{
2389
    int mem_index = (idx >> 2) - 1;
2390
    tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0, mem_index);
2391
    tcg_gen_st_i64(cpu_tmp1, cpu_env, offset);
2392
}
2393

    
2394
static inline void gen_stq_env_A0(int idx, int offset)
2395
{
2396
    int mem_index = (idx >> 2) - 1;
2397
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset);
2398
    tcg_gen_qemu_st64(cpu_tmp1, cpu_A0, mem_index);
2399
}
2400

    
2401
static inline void gen_ldo_env_A0(int idx, int offset)
2402
{
2403
    int mem_index = (idx >> 2) - 1;
2404
    tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0, mem_index);
2405
    tcg_gen_st_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2406
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2407
    tcg_gen_qemu_ld64(cpu_tmp1, cpu_tmp0, mem_index);
2408
    tcg_gen_st_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2409
}
2410

    
2411
static inline void gen_sto_env_A0(int idx, int offset)
2412
{
2413
    int mem_index = (idx >> 2) - 1;
2414
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2415
    tcg_gen_qemu_st64(cpu_tmp1, cpu_A0, mem_index);
2416
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2417
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2418
    tcg_gen_qemu_st64(cpu_tmp1, cpu_tmp0, mem_index);
2419
}
2420

    
2421
static inline void gen_op_movo(int d_offset, int s_offset)
2422
{
2423
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, s_offset);
2424
    tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset);
2425
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, s_offset + 8);
2426
    tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset + 8);
2427
}
2428

    
2429
static inline void gen_op_movq(int d_offset, int s_offset)
2430
{
2431
    tcg_gen_ld_i64(cpu_tmp1, cpu_env, s_offset);
2432
    tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset);
2433
}
2434

    
2435
static inline void gen_op_movl(int d_offset, int s_offset)
2436
{
2437
    tcg_gen_ld_i32(cpu_tmp2, cpu_env, s_offset);
2438
    tcg_gen_st_i32(cpu_tmp2, cpu_env, d_offset);
2439
}
2440

    
2441
static inline void gen_op_movq_env_0(int d_offset)
2442
{
2443
    tcg_gen_movi_i64(cpu_tmp1, 0);
2444
    tcg_gen_st_i64(cpu_tmp1, cpu_env, d_offset);
2445
}
2446

    
2447
/* Sentinels for the SSE dispatch tables: SSE_SPECIAL entries are
   hand-decoded in gen_sse(), SSE_DUMMY entries are valid opcodes that
   gen_sse() handles before table dispatch (femms/emms).  */
#define SSE_SPECIAL ((void *)1)
#define SSE_DUMMY ((void *)2)

/* Expand to the { MMX, XMM } helper pair for operation 'x'.  */
#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
/* Expand to the { ps, pd, ss, sd } helper quadruple for FP op 'x'.  */
#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
                     helper_ ## x ## ss, helper_ ## x ## sd, }
2453

    
2454
static void *sse_op_table1[256][4] = {
2455
    /* 3DNow! extensions */
2456
    [0x0e] = { SSE_DUMMY }, /* femms */
2457
    [0x0f] = { SSE_DUMMY }, /* pf... */
2458
    /* pure SSE operations */
2459
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2460
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2461
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
2462
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
2463
    [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
2464
    [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
2465
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
2466
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */
2467

    
2468
    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
2469
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
2470
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2471
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
2472
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2473
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2474
    [0x2e] = { helper_ucomiss, helper_ucomisd },
2475
    [0x2f] = { helper_comiss, helper_comisd },
2476
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2477
    [0x51] = SSE_FOP(sqrt),
2478
    [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
2479
    [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
2480
    [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
2481
    [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
2482
    [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
2483
    [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
2484
    [0x58] = SSE_FOP(add),
2485
    [0x59] = SSE_FOP(mul),
2486
    [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
2487
               helper_cvtss2sd, helper_cvtsd2ss },
2488
    [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
2489
    [0x5c] = SSE_FOP(sub),
2490
    [0x5d] = SSE_FOP(min),
2491
    [0x5e] = SSE_FOP(div),
2492
    [0x5f] = SSE_FOP(max),
2493

    
2494
    [0xc2] = SSE_FOP(cmpeq),
2495
    [0xc6] = { helper_shufps, helper_shufpd },
2496

    
2497
    /* MMX ops and their SSE extensions */
2498
    [0x60] = MMX_OP2(punpcklbw),
2499
    [0x61] = MMX_OP2(punpcklwd),
2500
    [0x62] = MMX_OP2(punpckldq),
2501
    [0x63] = MMX_OP2(packsswb),
2502
    [0x64] = MMX_OP2(pcmpgtb),
2503
    [0x65] = MMX_OP2(pcmpgtw),
2504
    [0x66] = MMX_OP2(pcmpgtl),
2505
    [0x67] = MMX_OP2(packuswb),
2506
    [0x68] = MMX_OP2(punpckhbw),
2507
    [0x69] = MMX_OP2(punpckhwd),
2508
    [0x6a] = MMX_OP2(punpckhdq),
2509
    [0x6b] = MMX_OP2(packssdw),
2510
    [0x6c] = { NULL, helper_punpcklqdq_xmm },
2511
    [0x6d] = { NULL, helper_punpckhqdq_xmm },
2512
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2513
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movqdu */
2514
    [0x70] = { helper_pshufw_mmx,
2515
               helper_pshufd_xmm,
2516
               helper_pshufhw_xmm,
2517
               helper_pshuflw_xmm },
2518
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2519
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2520
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2521
    [0x74] = MMX_OP2(pcmpeqb),
2522
    [0x75] = MMX_OP2(pcmpeqw),
2523
    [0x76] = MMX_OP2(pcmpeql),
2524
    [0x77] = { SSE_DUMMY }, /* emms */
2525
    [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
2526
    [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
2527
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
2528
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2529
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
2530
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
2531
    [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
2532
    [0xd1] = MMX_OP2(psrlw),
2533
    [0xd2] = MMX_OP2(psrld),
2534
    [0xd3] = MMX_OP2(psrlq),
2535
    [0xd4] = MMX_OP2(paddq),
2536
    [0xd5] = MMX_OP2(pmullw),
2537
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
2538
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
2539
    [0xd8] = MMX_OP2(psubusb),
2540
    [0xd9] = MMX_OP2(psubusw),
2541
    [0xda] = MMX_OP2(pminub),
2542
    [0xdb] = MMX_OP2(pand),
2543
    [0xdc] = MMX_OP2(paddusb),
2544
    [0xdd] = MMX_OP2(paddusw),
2545
    [0xde] = MMX_OP2(pmaxub),
2546
    [0xdf] = MMX_OP2(pandn),
2547
    [0xe0] = MMX_OP2(pavgb),
2548
    [0xe1] = MMX_OP2(psraw),
2549
    [0xe2] = MMX_OP2(psrad),
2550
    [0xe3] = MMX_OP2(pavgw),
2551
    [0xe4] = MMX_OP2(pmulhuw),
2552
    [0xe5] = MMX_OP2(pmulhw),
2553
    [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
2554
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntq */
2555
    [0xe8] = MMX_OP2(psubsb),
2556
    [0xe9] = MMX_OP2(psubsw),
2557
    [0xea] = MMX_OP2(pminsw),
2558
    [0xeb] = MMX_OP2(por),
2559
    [0xec] = MMX_OP2(paddsb),
2560
    [0xed] = MMX_OP2(paddsw),
2561
    [0xee] = MMX_OP2(pmaxsw),
2562
    [0xef] = MMX_OP2(pxor),
2563
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
2564
    [0xf1] = MMX_OP2(psllw),
2565
    [0xf2] = MMX_OP2(pslld),
2566
    [0xf3] = MMX_OP2(psllq),
2567
    [0xf4] = MMX_OP2(pmuludq),
2568
    [0xf5] = MMX_OP2(pmaddwd),
2569
    [0xf6] = MMX_OP2(psadbw),
2570
    [0xf7] = MMX_OP2(maskmov),
2571
    [0xf8] = MMX_OP2(psubb),
2572
    [0xf9] = MMX_OP2(psubw),
2573
    [0xfa] = MMX_OP2(psubl),
2574
    [0xfb] = MMX_OP2(psubq),
2575
    [0xfc] = MMX_OP2(paddb),
2576
    [0xfd] = MMX_OP2(paddw),
2577
    [0xfe] = MMX_OP2(paddl),
2578
};
2579

    
2580
static void *sse_op_table2[3 * 8][2] = {
2581
    [0 + 2] = MMX_OP2(psrlw),
2582
    [0 + 4] = MMX_OP2(psraw),
2583
    [0 + 6] = MMX_OP2(psllw),
2584
    [8 + 2] = MMX_OP2(psrld),
2585
    [8 + 4] = MMX_OP2(psrad),
2586
    [8 + 6] = MMX_OP2(pslld),
2587
    [16 + 2] = MMX_OP2(psrlq),
2588
    [16 + 3] = { NULL, helper_psrldq_xmm },
2589
    [16 + 6] = MMX_OP2(psllq),
2590
    [16 + 7] = { NULL, helper_pslldq_xmm },
2591
};
2592

    
2593
static void *sse_op_table3[4 * 3] = {
2594
    helper_cvtsi2ss,
2595
    helper_cvtsi2sd,
2596
    X86_64_ONLY(helper_cvtsq2ss),
2597
    X86_64_ONLY(helper_cvtsq2sd),
2598

    
2599
    helper_cvttss2si,
2600
    helper_cvttsd2si,
2601
    X86_64_ONLY(helper_cvttss2sq),
2602
    X86_64_ONLY(helper_cvttsd2sq),
2603

    
2604
    helper_cvtss2si,
2605
    helper_cvtsd2si,
2606
    X86_64_ONLY(helper_cvtss2sq),
2607
    X86_64_ONLY(helper_cvtsd2sq),
2608
};
2609

    
2610
static void *sse_op_table4[8][4] = {
2611
    SSE_FOP(cmpeq),
2612
    SSE_FOP(cmplt),
2613
    SSE_FOP(cmple),
2614
    SSE_FOP(cmpunord),
2615
    SSE_FOP(cmpneq),
2616
    SSE_FOP(cmpnlt),
2617
    SSE_FOP(cmpnle),
2618
    SSE_FOP(cmpord),
2619
};
2620

    
2621
/* 3DNow! operations, indexed by the opcode suffix byte that follows
   the operands of 0F 0F instructions.  */
static void *sse_op_table5[256] = {
    [0x0c] = helper_pi2fw,
    [0x0d] = helper_pi2fd,
    [0x1c] = helper_pf2iw,
    [0x1d] = helper_pf2id,
    [0x8a] = helper_pfnacc,
    [0x8e] = helper_pfpnacc,
    [0x90] = helper_pfcmpge,
    [0x94] = helper_pfmin,
    [0x96] = helper_pfrcp,
    [0x97] = helper_pfrsqrt,
    [0x9a] = helper_pfsub,
    [0x9e] = helper_pfadd,
    [0xa0] = helper_pfcmpgt,
    [0xa4] = helper_pfmax,
    [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = helper_movq, /* pfrsqit1 */
    [0xaa] = helper_pfsubr,
    [0xae] = helper_pfacc,
    [0xb0] = helper_pfcmpeq,
    [0xb4] = helper_pfmul,
    [0xb6] = helper_movq, /* pfrcpit2 */
    [0xb7] = helper_pmulhrw_mmx,
    [0xbb] = helper_pswapd,
    [0xbf] = helper_pavgb_mmx /* pavgusb */
};
2647

    
2648
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2649
{
2650
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
2651
    int modrm, mod, rm, reg, reg_addr, offset_addr;
2652
    void *sse_op2;
2653

    
2654
    b &= 0xff;
2655
    if (s->prefix & PREFIX_DATA)
2656
        b1 = 1;
2657
    else if (s->prefix & PREFIX_REPZ)
2658
        b1 = 2;
2659
    else if (s->prefix & PREFIX_REPNZ)
2660
        b1 = 3;
2661
    else
2662
        b1 = 0;
2663
    sse_op2 = sse_op_table1[b][b1];
2664
    if (!sse_op2)
2665
        goto illegal_op;
2666
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
2667
        is_xmm = 1;
2668
    } else {
2669
        if (b1 == 0) {
2670
            /* MMX case */
2671
            is_xmm = 0;
2672
        } else {
2673
            is_xmm = 1;
2674
        }
2675
    }
2676
    /* simple MMX/SSE operation */
2677
    if (s->flags & HF_TS_MASK) {
2678
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2679
        return;
2680
    }
2681
    if (s->flags & HF_EM_MASK) {
2682
    illegal_op:
2683
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2684
        return;
2685
    }
2686
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2687
        goto illegal_op;
2688
    if (b == 0x0e) {
2689
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
2690
            goto illegal_op;
2691
        /* femms */
2692
        tcg_gen_helper_0_0(helper_emms);
2693
        return;
2694
    }
2695
    if (b == 0x77) {
2696
        /* emms */
2697
        tcg_gen_helper_0_0(helper_emms);
2698
        return;
2699
    }
2700
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2701
       the static cpu state) */
2702
    if (!is_xmm) {
2703
        tcg_gen_helper_0_0(helper_enter_mmx);
2704
    }
2705

    
2706
    modrm = ldub_code(s->pc++);
2707
    reg = ((modrm >> 3) & 7);
2708
    if (is_xmm)
2709
        reg |= rex_r;
2710
    mod = (modrm >> 6) & 3;
2711
    if (sse_op2 == SSE_SPECIAL) {
2712
        b |= (b1 << 8);
2713
        switch(b) {
2714
        case 0x0e7: /* movntq */
2715
            if (mod == 3)
2716
                goto illegal_op;
2717
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2718
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2719
            break;
2720
        case 0x1e7: /* movntdq */
2721
        case 0x02b: /* movntps */
2722
        case 0x12b: /* movntps */
2723
        case 0x3f0: /* lddqu */
2724
            if (mod == 3)
2725
                goto illegal_op;
2726
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2727
            gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2728
            break;
2729
        case 0x6e: /* movd mm, ea */
2730
#ifdef TARGET_X86_64
2731
            if (s->dflag == 2) {
2732
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2733
                tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
2734
            } else
2735
#endif
2736
            {
2737
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2738
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
2739
                                 offsetof(CPUX86State,fpregs[reg].mmx));
2740
                tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
2741
            }
2742
            break;
2743
        case 0x16e: /* movd xmm, ea */
2744
#ifdef TARGET_X86_64
2745
            if (s->dflag == 2) {
2746
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2747
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
2748
                                 offsetof(CPUX86State,xmm_regs[reg]));
2749
                tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
2750
            } else
2751
#endif
2752
            {
2753
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2754
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
2755
                                 offsetof(CPUX86State,xmm_regs[reg]));
2756
                tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
2757
                tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2);
2758
            }
2759
            break;
2760
        case 0x6f: /* movq mm, ea */
2761
            if (mod != 3) {
2762
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2763
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2764
            } else {
2765
                rm = (modrm & 7);
2766
                tcg_gen_ld_i64(cpu_tmp1, cpu_env,
2767
                               offsetof(CPUX86State,fpregs[rm].mmx));
2768
                tcg_gen_st_i64(cpu_tmp1, cpu_env,
2769
                               offsetof(CPUX86State,fpregs[reg].mmx));
2770
            }
2771
            break;
2772
        case 0x010: /* movups */
2773
        case 0x110: /* movupd */
2774
        case 0x028: /* movaps */
2775
        case 0x128: /* movapd */
2776
        case 0x16f: /* movdqa xmm, ea */
2777
        case 0x26f: /* movdqu xmm, ea */
2778
            if (mod != 3) {
2779
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2780
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2781
            } else {
2782
                rm = (modrm & 7) | REX_B(s);
2783
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2784
                            offsetof(CPUX86State,xmm_regs[rm]));
2785
            }
2786
            break;
2787
        case 0x210: /* movss xmm, ea */
2788
            if (mod != 3) {
2789
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2790
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
2791
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2792
                gen_op_movl_T0_0();
2793
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2794
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2795
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2796
            } else {
2797
                rm = (modrm & 7) | REX_B(s);
2798
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2799
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2800
            }
2801
            break;
2802
        case 0x310: /* movsd xmm, ea */
2803
            if (mod != 3) {
2804
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2805
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2806
                gen_op_movl_T0_0();
2807
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2808
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2809
            } else {
2810
                rm = (modrm & 7) | REX_B(s);
2811
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2812
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2813
            }
2814
            break;
2815
        case 0x012: /* movlps */
2816
        case 0x112: /* movlpd */
2817
            if (mod != 3) {
2818
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2819
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2820
            } else {
2821
                /* movhlps */
2822
                rm = (modrm & 7) | REX_B(s);
2823
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2824
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2825
            }
2826
            break;
2827
        case 0x212: /* movsldup */
2828
            if (mod != 3) {
2829
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2830
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2831
            } else {
2832
                rm = (modrm & 7) | REX_B(s);
2833
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2834
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2835
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2836
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
2837
            }
2838
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2839
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2840
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2841
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2842
            break;
2843
        case 0x312: /* movddup */
2844
            if (mod != 3) {
2845
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2846
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2847
            } else {
2848
                rm = (modrm & 7) | REX_B(s);
2849
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2850
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2851
            }
2852
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2853
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2854
            break;
2855
        case 0x016: /* movhps */
2856
        case 0x116: /* movhpd */
2857
            if (mod != 3) {
2858
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2859
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2860
            } else {
2861
                /* movlhps */
2862
                rm = (modrm & 7) | REX_B(s);
2863
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2864
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2865
            }
2866
            break;
2867
        case 0x216: /* movshdup */
2868
            if (mod != 3) {
2869
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2870
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2871
            } else {
2872
                rm = (modrm & 7) | REX_B(s);
2873
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2874
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
2875
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2876
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
2877
            }
2878
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2879
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2880
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2881
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2882
            break;
2883
        case 0x7e: /* movd ea, mm */
2884
#ifdef TARGET_X86_64
2885
            if (s->dflag == 2) {
2886
                tcg_gen_ld_i64(cpu_T[0], cpu_env, 
2887
                               offsetof(CPUX86State,fpregs[reg].mmx));
2888
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2889
            } else
2890
#endif
2891
            {
2892
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, 
2893
                                 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
2894
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2895
            }
2896
            break;
2897
        case 0x17e: /* movd ea, xmm */
2898
#ifdef TARGET_X86_64
2899
            if (s->dflag == 2) {
2900
                tcg_gen_ld_i64(cpu_T[0], cpu_env, 
2901
                               offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2902
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2903
            } else
2904
#endif
2905
            {
2906
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, 
2907
                                 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2908
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2909
            }
2910
            break;
2911
        case 0x27e: /* movq xmm, ea */
2912
            if (mod != 3) {
2913
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2914
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2915
            } else {
2916
                rm = (modrm & 7) | REX_B(s);
2917
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2918
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2919
            }
2920
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2921
            break;
2922
        case 0x7f: /* movq ea, mm */
2923
            if (mod != 3) {
2924
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2925
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2926
            } else {
2927
                rm = (modrm & 7);
2928
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2929
                            offsetof(CPUX86State,fpregs[reg].mmx));
2930
            }
2931
            break;
2932
        case 0x011: /* movups */
2933
        case 0x111: /* movupd */
2934
        case 0x029: /* movaps */
2935
        case 0x129: /* movapd */
2936
        case 0x17f: /* movdqa ea, xmm */
2937
        case 0x27f: /* movdqu ea, xmm */
2938
            if (mod != 3) {
2939
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2940
                gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2941
            } else {
2942
                rm = (modrm & 7) | REX_B(s);
2943
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
2944
                            offsetof(CPUX86State,xmm_regs[reg]));
2945
            }
2946
            break;
2947
        case 0x211: /* movss ea, xmm */
2948
            if (mod != 3) {
2949
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2950
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2951
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
2952
            } else {
2953
                rm = (modrm & 7) | REX_B(s);
2954
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
2955
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2956
            }
2957
            break;
2958
        case 0x311: /* movsd ea, xmm */
2959
            if (mod != 3) {
2960
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2961
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2962
            } else {
2963
                rm = (modrm & 7) | REX_B(s);
2964
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2965
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2966
            }
2967
            break;
2968
        case 0x013: /* movlps */
2969
        case 0x113: /* movlpd */
2970
            if (mod != 3) {
2971
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2972
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2973
            } else {
2974
                goto illegal_op;
2975
            }
2976
            break;
2977
        case 0x017: /* movhps */
2978
        case 0x117: /* movhpd */
2979
            if (mod != 3) {
2980
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2981
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2982
            } else {
2983
                goto illegal_op;
2984
            }
2985
            break;
2986
        case 0x71: /* shift mm, im */
2987
        case 0x72:
2988
        case 0x73:
2989
        case 0x171: /* shift xmm, im */
2990
        case 0x172:
2991
        case 0x173:
2992
            val = ldub_code(s->pc++);
2993
            if (is_xmm) {
2994
                gen_op_movl_T0_im(val);
2995
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2996
                gen_op_movl_T0_0();
2997
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
2998
                op1_offset = offsetof(CPUX86State,xmm_t0);
2999
            } else {
3000
                gen_op_movl_T0_im(val);
3001
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3002
                gen_op_movl_T0_0();
3003
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3004
                op1_offset = offsetof(CPUX86State,mmx_t0);
3005
            }
3006
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
3007
            if (!sse_op2)
3008
                goto illegal_op;
3009
            if (is_xmm) {
3010
                rm = (modrm & 7) | REX_B(s);
3011
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3012
            } else {
3013
                rm = (modrm & 7);
3014
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3015
            }
3016
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3017
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3018
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3019
            break;
3020
        case 0x050: /* movmskps */
3021
            rm = (modrm & 7) | REX_B(s);
3022
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
3023
                             offsetof(CPUX86State,xmm_regs[rm]));
3024
            tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2, cpu_ptr0);
3025
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
3026
            gen_op_mov_reg_T0(OT_LONG, reg);
3027
            break;
3028
        case 0x150: /* movmskpd */
3029
            rm = (modrm & 7) | REX_B(s);
3030
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
3031
                             offsetof(CPUX86State,xmm_regs[rm]));
3032
            tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2, cpu_ptr0);
3033
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
3034
            gen_op_mov_reg_T0(OT_LONG, reg);
3035
            break;
3036
        case 0x02a: /* cvtpi2ps */
3037
        case 0x12a: /* cvtpi2pd */
3038
            tcg_gen_helper_0_0(helper_enter_mmx);
3039
            if (mod != 3) {
3040
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3041
                op2_offset = offsetof(CPUX86State,mmx_t0);
3042
                gen_ldq_env_A0(s->mem_index, op2_offset);
3043
            } else {
3044
                rm = (modrm & 7);
3045
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3046
            }
3047
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3048
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3049
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3050
            switch(b >> 8) {
3051
            case 0x0:
3052
                tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3053
                break;
3054
            default:
3055
            case 0x1:
3056
                tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3057
                break;
3058
            }
3059
            break;
3060
        case 0x22a: /* cvtsi2ss */
3061
        case 0x32a: /* cvtsi2sd */
3062
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3063
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3064
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3065
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3066
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3067
            tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
3068
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2);
3069
            break;
3070
        case 0x02c: /* cvttps2pi */
3071
        case 0x12c: /* cvttpd2pi */
3072
        case 0x02d: /* cvtps2pi */
3073
        case 0x12d: /* cvtpd2pi */
3074
            tcg_gen_helper_0_0(helper_enter_mmx);
3075
            if (mod != 3) {
3076
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3077
                op2_offset = offsetof(CPUX86State,xmm_t0);
3078
                gen_ldo_env_A0(s->mem_index, op2_offset);
3079
            } else {
3080
                rm = (modrm & 7) | REX_B(s);
3081
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3082
            }
3083
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3084
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3085
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3086
            switch(b) {
3087
            case 0x02c:
3088
                tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3089
                break;
3090
            case 0x12c:
3091
                tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3092
                break;
3093
            case 0x02d:
3094
                tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3095
                break;
3096
            case 0x12d:
3097
                tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3098
                break;
3099
            }
3100
            break;
3101
        case 0x22c: /* cvttss2si */
3102
        case 0x32c: /* cvttsd2si */
3103
        case 0x22d: /* cvtss2si */
3104
        case 0x32d: /* cvtsd2si */
3105
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3106
            if (mod != 3) {
3107
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3108
                if ((b >> 8) & 1) {
3109
                    gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3110
                } else {
3111
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3112
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3113
                }
3114
                op2_offset = offsetof(CPUX86State,xmm_t0);
3115
            } else {
3116
                rm = (modrm & 7) | REX_B(s);
3117
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3118
            }
3119
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3120
                                    (b & 1) * 4];
3121
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3122
            if (ot == OT_LONG) {
3123
                tcg_gen_helper_1_1(sse_op2, cpu_tmp2, cpu_ptr0);
3124
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
3125
            } else {
3126
                tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
3127
            }
3128
            gen_op_mov_reg_T0(ot, reg);
3129
            break;
3130
        case 0xc4: /* pinsrw */
3131
        case 0x1c4:
3132
            s->rip_offset = 1;
3133
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3134
            val = ldub_code(s->pc++);
3135
            if (b1) {
3136
                val &= 7;
3137
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
3138
                                offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3139
            } else {
3140
                val &= 3;
3141
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
3142
                                offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3143
            }
3144
            break;
3145
        case 0xc5: /* pextrw */
3146
        case 0x1c5:
3147
            if (mod != 3)
3148
                goto illegal_op;
3149
            val = ldub_code(s->pc++);
3150
            if (b1) {
3151
                val &= 7;
3152
                rm = (modrm & 7) | REX_B(s);
3153
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3154
                                 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3155
            } else {
3156
                val &= 3;
3157
                rm = (modrm & 7);
3158
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3159
                                offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3160
            }
3161
            reg = ((modrm >> 3) & 7) | rex_r;
3162
            gen_op_mov_reg_T0(OT_LONG, reg);
3163
            break;
3164
        case 0x1d6: /* movq ea, xmm */
3165
            if (mod != 3) {
3166
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3167
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3168
            } else {
3169
                rm = (modrm & 7) | REX_B(s);
3170
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3171
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3172
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3173
            }
3174
            break;
3175
        case 0x2d6: /* movq2dq */
3176
            tcg_gen_helper_0_0(helper_enter_mmx);
3177
            rm = (modrm & 7);
3178
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3179
                        offsetof(CPUX86State,fpregs[rm].mmx));
3180
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3181
            break;
3182
        case 0x3d6: /* movdq2q */
3183
            tcg_gen_helper_0_0(helper_enter_mmx);
3184
            rm = (modrm & 7) | REX_B(s);
3185
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3186
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3187
            break;
3188
        case 0xd7: /* pmovmskb */
3189
        case 0x1d7:
3190
            if (mod != 3)
3191
                goto illegal_op;
3192
            if (b1) {
3193
                rm = (modrm & 7) | REX_B(s);
3194
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
3195
                tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2, cpu_ptr0);
3196
            } else {
3197
                rm = (modrm & 7);
3198
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
3199
                tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2, cpu_ptr0);
3200
            }
3201
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
3202
            reg = ((modrm >> 3) & 7) | rex_r;
3203
            gen_op_mov_reg_T0(OT_LONG, reg);
3204
            break;
3205
        default:
3206
            goto illegal_op;
3207
        }
3208
    } else {
3209
        /* generic MMX or SSE operation */
3210
        switch(b) {
3211
        case 0xf7:
3212
            /* maskmov : we must prepare A0 */
3213
            if (mod != 3)
3214
                goto illegal_op;
3215
#ifdef TARGET_X86_64
3216
            if (s->aflag == 2) {
3217
                gen_op_movq_A0_reg(R_EDI);
3218
            } else
3219
#endif
3220
            {
3221
                gen_op_movl_A0_reg(R_EDI);
3222
                if (s->aflag == 0)
3223
                    gen_op_andl_A0_ffff();
3224
            }
3225
            gen_add_A0_ds_seg(s);
3226
            break;
3227
        case 0x70: /* pshufx insn */
3228
        case 0xc6: /* pshufx insn */
3229
        case 0xc2: /* compare insns */
3230
            s->rip_offset = 1;
3231
            break;
3232
        default:
3233
            break;
3234
        }
3235
        if (is_xmm) {
3236
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3237
            if (mod != 3) {
3238
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3239
                op2_offset = offsetof(CPUX86State,xmm_t0);
3240
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3241
                                b == 0xc2)) {
3242
                    /* specific case for SSE single instructions */
3243
                    if (b1 == 2) {
3244
                        /* 32 bit access */
3245
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3246
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3247
                    } else {
3248
                        /* 64 bit access */
3249
                        gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3250
                    }
3251
                } else {
3252
                    gen_ldo_env_A0(s->mem_index, op2_offset);
3253
                }
3254
            } else {
3255
                rm = (modrm & 7) | REX_B(s);
3256
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3257
            }
3258
        } else {
3259
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3260
            if (mod != 3) {
3261
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3262
                op2_offset = offsetof(CPUX86State,mmx_t0);
3263
                gen_ldq_env_A0(s->mem_index, op2_offset);
3264
            } else {
3265
                rm = (modrm & 7);
3266
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3267
            }
3268
        }
3269
        switch(b) {
3270
        case 0x0f: /* 3DNow! data insns */
3271
            if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3272
                goto illegal_op;
3273
            val = ldub_code(s->pc++);
3274
            sse_op2 = sse_op_table5[val];
3275
            if (!sse_op2)
3276
                goto illegal_op;
3277
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3278
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3279
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3280
            break;
3281
        case 0x70: /* pshufx insn */
3282
        case 0xc6: /* pshufx insn */
3283
            val = ldub_code(s->pc++);
3284
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3285
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3286
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3287
            break;
3288
        case 0xc2:
3289
            /* compare insns */
3290
            val = ldub_code(s->pc++);
3291
            if (val >= 8)
3292
                goto illegal_op;
3293
            sse_op2 = sse_op_table4[val][b1];
3294
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3295
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3296
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3297
            break;
3298
        default:
3299
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3300
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3301
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3302
            break;
3303
        }
3304
        if (b == 0x2e || b == 0x2f) {
3305
            /* just to keep the EFLAGS optimization correct */
3306
            gen_op_com_dummy();
3307
            s->cc_op = CC_OP_EFLAGS;
3308
        }
3309
    }
3310
}
3311

    
3312
/* convert one instruction. s->is_jmp is set if the translation must
3313
   be stopped. Return the next pc value */
3314
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3315
{
3316
    int b, prefixes, aflag, dflag;
3317
    int shift, ot;
3318
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3319
    target_ulong next_eip, tval;
3320
    int rex_w, rex_r;
3321

    
3322
    s->pc = pc_start;
3323
    prefixes = 0;
3324
    aflag = s->code32;
3325
    dflag = s->code32;
3326
    s->override = -1;
3327
    rex_w = -1;
3328
    rex_r = 0;
3329
#ifdef TARGET_X86_64
3330
    s->rex_x = 0;
3331
    s->rex_b = 0;
3332
    x86_64_hregs = 0;
3333
#endif
3334
    s->rip_offset = 0; /* for relative ip address */
3335
 next_byte:
3336
    b = ldub_code(s->pc);
3337
    s->pc++;
3338
    /* check prefixes */
3339
#ifdef TARGET_X86_64
3340
    if (CODE64(s)) {
3341
        switch (b) {
3342
        case 0xf3:
3343
            prefixes |= PREFIX_REPZ;
3344
            goto next_byte;
3345
        case 0xf2:
3346
            prefixes |= PREFIX_REPNZ;
3347
            goto next_byte;
3348
        case 0xf0:
3349
            prefixes |= PREFIX_LOCK;
3350
            goto next_byte;
3351
        case 0x2e:
3352
            s->override = R_CS;
3353
            goto next_byte;
3354
        case 0x36:
3355
            s->override = R_SS;
3356
            goto next_byte;
3357
        case 0x3e:
3358
            s->override = R_DS;
3359
            goto next_byte;
3360
        case 0x26:
3361
            s->override = R_ES;
3362
            goto next_byte;
3363
        case 0x64:
3364
            s->override = R_FS;
3365
            goto next_byte;
3366
        case 0x65:
3367
            s->override = R_GS;
3368
            goto next_byte;
3369
        case 0x66:
3370
            prefixes |= PREFIX_DATA;
3371
            goto next_byte;
3372
        case 0x67:
3373
            prefixes |= PREFIX_ADR;
3374
            goto next_byte;
3375
        case 0x40 ... 0x4f:
3376
            /* REX prefix */
3377
            rex_w = (b >> 3) & 1;
3378
            rex_r = (b & 0x4) << 1;
3379
            s->rex_x = (b & 0x2) << 2;
3380
            REX_B(s) = (b & 0x1) << 3;
3381
            x86_64_hregs = 1; /* select uniform byte register addressing */
3382
            goto next_byte;
3383
        }
3384
        if (rex_w == 1) {
3385
            /* 0x66 is ignored if rex.w is set */
3386
            dflag = 2;
3387
        } else {
3388
            if (prefixes & PREFIX_DATA)
3389
                dflag ^= 1;
3390
        }
3391
        if (!(prefixes & PREFIX_ADR))
3392
            aflag = 2;
3393
    } else
3394
#endif
3395
    {
3396
        switch (b) {
3397
        case 0xf3:
3398
            prefixes |= PREFIX_REPZ;
3399
            goto next_byte;
3400
        case 0xf2:
3401
            prefixes |= PREFIX_REPNZ;
3402
            goto next_byte;
3403
        case 0xf0:
3404
            prefixes |= PREFIX_LOCK;
3405
            goto next_byte;
3406
        case 0x2e:
3407
            s->override = R_CS;
3408
            goto next_byte;
3409
        case 0x36:
3410
            s->override = R_SS;
3411
            goto next_byte;
3412
        case 0x3e:
3413
            s->override = R_DS;
3414
            goto next_byte;
3415
        case 0x26:
3416
            s->override = R_ES;
3417
            goto next_byte;
3418
        case 0x64:
3419
            s->override = R_FS;
3420
            goto next_byte;
3421
        case 0x65:
3422
            s->override = R_GS;
3423
            goto next_byte;
3424
        case 0x66:
3425
            prefixes |= PREFIX_DATA;
3426
            goto next_byte;
3427
        case 0x67:
3428
            prefixes |= PREFIX_ADR;
3429
            goto next_byte;
3430
        }
3431
        if (prefixes & PREFIX_DATA)
3432
            dflag ^= 1;
3433
        if (prefixes & PREFIX_ADR)
3434
            aflag ^= 1;
3435
    }
3436

    
3437
    s->prefix = prefixes;
3438
    s->aflag = aflag;
3439
    s->dflag = dflag;
3440

    
3441
    /* lock generation */
3442
    if (prefixes & PREFIX_LOCK)
3443
        gen_op_lock();
3444

    
3445
    /* now check op code */
3446
 reswitch:
3447
    switch(b) {
3448
    case 0x0f:
3449
        /**************************/
3450
        /* extended op code */
3451
        b = ldub_code(s->pc++) | 0x100;
3452
        goto reswitch;
3453

    
3454
        /**************************/
3455
        /* arith & logic */
3456
    case 0x00 ... 0x05:
3457
    case 0x08 ... 0x0d:
3458
    case 0x10 ... 0x15:
3459
    case 0x18 ... 0x1d:
3460
    case 0x20 ... 0x25:
3461
    case 0x28 ... 0x2d:
3462
    case 0x30 ... 0x35:
3463
    case 0x38 ... 0x3d:
3464
        {
3465
            int op, f, val;
3466
            op = (b >> 3) & 7;
3467
            f = (b >> 1) & 3;
3468

    
3469
            if ((b & 1) == 0)
3470
                ot = OT_BYTE;
3471
            else
3472
                ot = dflag + OT_WORD;
3473

    
3474
            switch(f) {
3475
            case 0: /* OP Ev, Gv */
3476
                modrm = ldub_code(s->pc++);
3477
                reg = ((modrm >> 3) & 7) | rex_r;
3478
                mod = (modrm >> 6) & 3;
3479
                rm = (modrm & 7) | REX_B(s);
3480
                if (mod != 3) {
3481
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3482
                    opreg = OR_TMP0;
3483
                } else if (op == OP_XORL && rm == reg) {
3484
                xor_zero:
3485
                    /* xor reg, reg optimisation */
3486
                    gen_op_movl_T0_0();
3487
                    s->cc_op = CC_OP_LOGICB + ot;
3488
                    gen_op_mov_reg_T0(ot, reg);
3489
                    gen_op_update1_cc();
3490
                    break;
3491
                } else {
3492
                    opreg = rm;
3493
                }
3494
                gen_op_mov_TN_reg(ot, 1, reg);
3495
                gen_op(s, op, ot, opreg);
3496
                break;
3497
            case 1: /* OP Gv, Ev */
3498
                modrm = ldub_code(s->pc++);
3499
                mod = (modrm >> 6) & 3;
3500
                reg = ((modrm >> 3) & 7) | rex_r;
3501
                rm = (modrm & 7) | REX_B(s);
3502
                if (mod != 3) {
3503
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3504
                    gen_op_ld_T1_A0(ot + s->mem_index);
3505
                } else if (op == OP_XORL && rm == reg) {
3506
                    goto xor_zero;
3507
                } else {
3508
                    gen_op_mov_TN_reg(ot, 1, rm);
3509
                }
3510
                gen_op(s, op, ot, reg);
3511
                break;
3512
            case 2: /* OP A, Iv */
3513
                val = insn_get(s, ot);
3514
                gen_op_movl_T1_im(val);
3515
                gen_op(s, op, ot, OR_EAX);
3516
                break;
3517
            }
3518
        }
3519
        break;
3520

    
3521
    case 0x80: /* GRP1 */
3522
    case 0x81:
3523
    case 0x82:
3524
    case 0x83:
3525
        {
3526
            int val;
3527

    
3528
            if ((b & 1) == 0)
3529
                ot = OT_BYTE;
3530
            else
3531
                ot = dflag + OT_WORD;
3532

    
3533
            modrm = ldub_code(s->pc++);
3534
            mod = (modrm >> 6) & 3;
3535
            rm = (modrm & 7) | REX_B(s);
3536
            op = (modrm >> 3) & 7;
3537

    
3538
            if (mod != 3) {
3539
                if (b == 0x83)
3540
                    s->rip_offset = 1;
3541
                else
3542
                    s->rip_offset = insn_const_size(ot);
3543
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3544
                opreg = OR_TMP0;
3545
            } else {
3546
                opreg = rm;
3547
            }
3548

    
3549
            switch(b) {
3550
            default:
3551
            case 0x80:
3552
            case 0x81:
3553
            case 0x82:
3554
                val = insn_get(s, ot);
3555
                break;
3556
            case 0x83:
3557
                val = (int8_t)insn_get(s, OT_BYTE);
3558
                break;
3559
            }
3560
            gen_op_movl_T1_im(val);
3561
            gen_op(s, op, ot, opreg);
3562
        }
3563
        break;
3564

    
3565
        /**************************/
3566
        /* inc, dec, and other misc arith */
3567
    case 0x40 ... 0x47: /* inc Gv */
3568
        ot = dflag ? OT_LONG : OT_WORD;
3569
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
3570
        break;
3571
    case 0x48 ... 0x4f: /* dec Gv */
3572
        ot = dflag ? OT_LONG : OT_WORD;
3573
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
3574
        break;
3575
    case 0xf6: /* GRP3 */
3576
    case 0xf7:
3577
        if ((b & 1) == 0)
3578
            ot = OT_BYTE;
3579
        else
3580
            ot = dflag + OT_WORD;
3581

    
3582
        modrm = ldub_code(s->pc++);
3583
        mod = (modrm >> 6) & 3;
3584
        rm = (modrm & 7) | REX_B(s);
3585
        op = (modrm >> 3) & 7;
3586
        if (mod != 3) {
3587
            if (op == 0)
3588
                s->rip_offset = insn_const_size(ot);
3589
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3590
            gen_op_ld_T0_A0(ot + s->mem_index);
3591
        } else {
3592
            gen_op_mov_TN_reg(ot, 0, rm);
3593
        }
3594

    
3595
        switch(op) {
3596
        case 0: /* test */
3597
            val = insn_get(s, ot);
3598
            gen_op_movl_T1_im(val);
3599
            gen_op_testl_T0_T1_cc();
3600
            s->cc_op = CC_OP_LOGICB + ot;
3601
            break;
3602
        case 2: /* not */
3603
            gen_op_notl_T0();
3604
            if (mod != 3) {
3605
                gen_op_st_T0_A0(ot + s->mem_index);
3606
            } else {
3607
                gen_op_mov_reg_T0(ot, rm);
3608
            }
3609
            break;
3610
        case 3: /* neg */
3611
            gen_op_negl_T0();
3612
            if (mod != 3) {
3613
                gen_op_st_T0_A0(ot + s->mem_index);
3614
            } else {
3615
                gen_op_mov_reg_T0(ot, rm);
3616
            }
3617
            gen_op_update_neg_cc();
3618
            s->cc_op = CC_OP_SUBB + ot;
3619
            break;
3620
        case 4: /* mul */
3621
            switch(ot) {
3622
            case OT_BYTE:
3623
                gen_op_mulb_AL_T0();
3624
                s->cc_op = CC_OP_MULB;
3625
                break;
3626
            case OT_WORD:
3627
                gen_op_mulw_AX_T0();
3628
                s->cc_op = CC_OP_MULW;
3629
                break;
3630
            default:
3631
            case OT_LONG:
3632
                gen_op_mull_EAX_T0();
3633
                s->cc_op = CC_OP_MULL;
3634
                break;
3635
#ifdef TARGET_X86_64
3636
            case OT_QUAD:
3637
                gen_op_mulq_EAX_T0();
3638
                s->cc_op = CC_OP_MULQ;
3639
                break;
3640
#endif
3641
            }
3642
            break;
3643
        case 5: /* imul */
3644
            switch(ot) {
3645
            case OT_BYTE:
3646
                gen_op_imulb_AL_T0();
3647
                s->cc_op = CC_OP_MULB;
3648
                break;
3649
            case OT_WORD:
3650
                gen_op_imulw_AX_T0();
3651
                s->cc_op = CC_OP_MULW;
3652
                break;
3653
            default:
3654
            case OT_LONG:
3655
                gen_op_imull_EAX_T0();
3656
                s->cc_op = CC_OP_MULL;
3657
                break;
3658
#ifdef TARGET_X86_64
3659
            case OT_QUAD:
3660
                gen_op_imulq_EAX_T0();
3661
                s->cc_op = CC_OP_MULQ;
3662
                break;
3663
#endif
3664
            }
3665
            break;
3666
        case 6: /* div */
3667
            switch(ot) {
3668
            case OT_BYTE:
3669
                gen_jmp_im(pc_start - s->cs_base);
3670
                tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
3671
                break;
3672
            case OT_WORD:
3673
                gen_jmp_im(pc_start - s->cs_base);
3674
                tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
3675
                break;
3676
            default:
3677
            case OT_LONG:
3678
                gen_jmp_im(pc_start - s->cs_base);
3679
                tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
3680
                break;
3681
#ifdef TARGET_X86_64
3682
            case OT_QUAD:
3683
                gen_jmp_im(pc_start - s->cs_base);
3684
                tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
3685
                break;
3686
#endif
3687
            }
3688
            break;
3689
        case 7: /* idiv */
3690
            switch(ot) {
3691
            case OT_BYTE:
3692
                gen_jmp_im(pc_start - s->cs_base);
3693
                tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
3694
                break;
3695
            case OT_WORD:
3696
                gen_jmp_im(pc_start - s->cs_base);
3697
                tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
3698
                break;
3699
            default:
3700
            case OT_LONG:
3701
                gen_jmp_im(pc_start - s->cs_base);
3702
                tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
3703
                break;
3704
#ifdef TARGET_X86_64
3705
            case OT_QUAD:
3706
                gen_jmp_im(pc_start - s->cs_base);
3707
                tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
3708
                break;
3709
#endif
3710
            }
3711
            break;
3712
        default:
3713
            goto illegal_op;
3714
        }
3715
        break;
3716

    
3717
    case 0xfe: /* GRP4 */
3718
    case 0xff: /* GRP5 */
3719
        if ((b & 1) == 0)
3720
            ot = OT_BYTE;
3721
        else
3722
            ot = dflag + OT_WORD;
3723

    
3724
        modrm = ldub_code(s->pc++);
3725
        mod = (modrm >> 6) & 3;
3726
        rm = (modrm & 7) | REX_B(s);
3727
        op = (modrm >> 3) & 7;
3728
        if (op >= 2 && b == 0xfe) {
3729
            goto illegal_op;
3730
        }
3731
        if (CODE64(s)) {
3732
            if (op == 2 || op == 4) {
3733
                /* operand size for jumps is 64 bit */
3734
                ot = OT_QUAD;
3735
            } else if (op == 3 || op == 5) {
3736
                /* for call calls, the operand is 16 or 32 bit, even
3737
                   in long mode */
3738
                ot = dflag ? OT_LONG : OT_WORD;
3739
            } else if (op == 6) {
3740
                /* default push size is 64 bit */
3741
                ot = dflag ? OT_QUAD : OT_WORD;
3742
            }
3743
        }
3744
        if (mod != 3) {
3745
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3746
            if (op >= 2 && op != 3 && op != 5)
3747
                gen_op_ld_T0_A0(ot + s->mem_index);
3748
        } else {
3749
            gen_op_mov_TN_reg(ot, 0, rm);
3750
        }
3751

    
3752
        switch(op) {
3753
        case 0: /* inc Ev */
3754
            if (mod != 3)
3755
                opreg = OR_TMP0;
3756
            else
3757
                opreg = rm;
3758
            gen_inc(s, ot, opreg, 1);
3759
            break;
3760
        case 1: /* dec Ev */
3761
            if (mod != 3)
3762
                opreg = OR_TMP0;
3763
            else
3764
                opreg = rm;
3765
            gen_inc(s, ot, opreg, -1);
3766
            break;
3767
        case 2: /* call Ev */
3768
            /* XXX: optimize if memory (no 'and' is necessary) */
3769
            if (s->dflag == 0)
3770
                gen_op_andl_T0_ffff();
3771
            next_eip = s->pc - s->cs_base;
3772
            gen_movtl_T1_im(next_eip);
3773
            gen_push_T1(s);
3774
            gen_op_jmp_T0();
3775
            gen_eob(s);
3776
            break;
3777
        case 3: /* lcall Ev */
3778
            gen_op_ld_T1_A0(ot + s->mem_index);
3779
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3780
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3781
        do_lcall:
3782
            if (s->pe && !s->vm86) {
3783
                if (s->cc_op != CC_OP_DYNAMIC)
3784
                    gen_op_set_cc_op(s->cc_op);
3785
                gen_jmp_im(pc_start - s->cs_base);
3786
                gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3787
            } else {
3788
                gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3789
            }
3790
            gen_eob(s);
3791
            break;
3792
        case 4: /* jmp Ev */
3793
            if (s->dflag == 0)
3794
                gen_op_andl_T0_ffff();
3795
            gen_op_jmp_T0();
3796
            gen_eob(s);
3797
            break;
3798
        case 5: /* ljmp Ev */
3799
            gen_op_ld_T1_A0(ot + s->mem_index);
3800
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3801
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3802
        do_ljmp:
3803
            if (s->pe && !s->vm86) {
3804
                if (s->cc_op != CC_OP_DYNAMIC)
3805
                    gen_op_set_cc_op(s->cc_op);
3806
                gen_jmp_im(pc_start - s->cs_base);
3807
                gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3808
            } else {
3809
                gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3810
                gen_op_movl_T0_T1();
3811
                gen_op_jmp_T0();
3812
            }
3813
            gen_eob(s);
3814
            break;
3815
        case 6: /* push Ev */
3816
            gen_push_T0(s);
3817
            break;
3818
        default:
3819
            goto illegal_op;
3820
        }
3821
        break;
3822

    
3823
    case 0x84: /* test Ev, Gv */
3824
    case 0x85:
3825
        if ((b & 1) == 0)
3826
            ot = OT_BYTE;
3827
        else
3828
            ot = dflag + OT_WORD;
3829

    
3830
        modrm = ldub_code(s->pc++);
3831
        mod = (modrm >> 6) & 3;
3832
        rm = (modrm & 7) | REX_B(s);
3833
        reg = ((modrm >> 3) & 7) | rex_r;
3834

    
3835
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3836
        gen_op_mov_TN_reg(ot, 1, reg);
3837
        gen_op_testl_T0_T1_cc();
3838
        s->cc_op = CC_OP_LOGICB + ot;
3839
        break;
3840

    
3841
    case 0xa8: /* test eAX, Iv */
3842
    case 0xa9:
3843
        if ((b & 1) == 0)
3844
            ot = OT_BYTE;
3845
        else
3846
            ot = dflag + OT_WORD;
3847
        val = insn_get(s, ot);
3848

    
3849
        gen_op_mov_TN_reg(ot, 0, OR_EAX);
3850
        gen_op_movl_T1_im(val);
3851
        gen_op_testl_T0_T1_cc();
3852
        s->cc_op = CC_OP_LOGICB + ot;
3853
        break;
3854

    
3855
    case 0x98: /* CWDE/CBW */
3856
#ifdef TARGET_X86_64
3857
        if (dflag == 2) {
3858
            gen_op_movslq_RAX_EAX();
3859
        } else
3860
#endif
3861
        if (dflag == 1)
3862
            gen_op_movswl_EAX_AX();
3863
        else
3864
            gen_op_movsbw_AX_AL();
3865
        break;
3866
    case 0x99: /* CDQ/CWD */
3867
#ifdef TARGET_X86_64
3868
        if (dflag == 2) {
3869
            gen_op_movsqo_RDX_RAX();
3870
        } else
3871
#endif
3872
        if (dflag == 1)
3873
            gen_op_movslq_EDX_EAX();
3874
        else
3875
            gen_op_movswl_DX_AX();
3876
        break;
3877
    case 0x1af: /* imul Gv, Ev */
3878
    case 0x69: /* imul Gv, Ev, I */
3879
    case 0x6b:
3880
        ot = dflag + OT_WORD;
3881
        modrm = ldub_code(s->pc++);
3882
        reg = ((modrm >> 3) & 7) | rex_r;
3883
        if (b == 0x69)
3884
            s->rip_offset = insn_const_size(ot);
3885
        else if (b == 0x6b)
3886
            s->rip_offset = 1;
3887
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3888
        if (b == 0x69) {
3889
            val = insn_get(s, ot);
3890
            gen_op_movl_T1_im(val);
3891
        } else if (b == 0x6b) {
3892
            val = (int8_t)insn_get(s, OT_BYTE);
3893
            gen_op_movl_T1_im(val);
3894
        } else {
3895
            gen_op_mov_TN_reg(ot, 1, reg);
3896
        }
3897

    
3898
#ifdef TARGET_X86_64
3899
        if (ot == OT_QUAD) {
3900
            gen_op_imulq_T0_T1();
3901
        } else
3902
#endif
3903
        if (ot == OT_LONG) {
3904
            gen_op_imull_T0_T1();
3905
        } else {
3906
            gen_op_imulw_T0_T1();
3907
        }
3908
        gen_op_mov_reg_T0(ot, reg);
3909
        s->cc_op = CC_OP_MULB + ot;
3910
        break;
3911
    case 0x1c0:
3912
    case 0x1c1: /* xadd Ev, Gv */
3913
        if ((b & 1) == 0)
3914
            ot = OT_BYTE;
3915
        else
3916
            ot = dflag + OT_WORD;
3917
        modrm = ldub_code(s->pc++);
3918
        reg = ((modrm >> 3) & 7) | rex_r;
3919
        mod = (modrm >> 6) & 3;
3920
        if (mod == 3) {
3921
            rm = (modrm & 7) | REX_B(s);
3922
            gen_op_mov_TN_reg(ot, 0, reg);
3923
            gen_op_mov_TN_reg(ot, 1, rm);
3924
            gen_op_addl_T0_T1();
3925
            gen_op_mov_reg_T1(ot, reg);
3926
            gen_op_mov_reg_T0(ot, rm);
3927
        } else {
3928
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3929
            gen_op_mov_TN_reg(ot, 0, reg);
3930
            gen_op_ld_T1_A0(ot + s->mem_index);
3931
            gen_op_addl_T0_T1();
3932
            gen_op_st_T0_A0(ot + s->mem_index);
3933
            gen_op_mov_reg_T1(ot, reg);
3934
        }
3935
        gen_op_update2_cc();
3936
        s->cc_op = CC_OP_ADDB + ot;
3937
        break;
3938
    case 0x1b0:
3939
    case 0x1b1: /* cmpxchg Ev, Gv */
3940
        if ((b & 1) == 0)
3941
            ot = OT_BYTE;
3942
        else
3943
            ot = dflag + OT_WORD;
3944
        modrm = ldub_code(s->pc++);
3945
        reg = ((modrm >> 3) & 7) | rex_r;
3946
        mod = (modrm >> 6) & 3;
3947
        gen_op_mov_TN_reg(ot, 1, reg);
3948
        if (mod == 3) {
3949
            rm = (modrm & 7) | REX_B(s);
3950
            gen_op_mov_TN_reg(ot, 0, rm);
3951
            gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3952
            gen_op_mov_reg_T0(ot, rm);
3953
        } else {
3954
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3955
            gen_op_ld_T0_A0(ot + s->mem_index);
3956
            gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3957
        }
3958
        s->cc_op = CC_OP_SUBB + ot;
3959
        break;
3960
    case 0x1c7: /* cmpxchg8b */
3961
        modrm = ldub_code(s->pc++);
3962
        mod = (modrm >> 6) & 3;
3963
        if ((mod == 3) || ((modrm & 0x38) != 0x8))
3964
            goto illegal_op;
3965
        gen_jmp_im(pc_start - s->cs_base);
3966
        if (s->cc_op != CC_OP_DYNAMIC)
3967
            gen_op_set_cc_op(s->cc_op);
3968
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3969
        gen_op_cmpxchg8b();
3970
        s->cc_op = CC_OP_EFLAGS;
3971
        break;
3972

    
3973
        /**************************/
3974
        /* push/pop */
3975
    case 0x50 ... 0x57: /* push */
3976
        gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
3977
        gen_push_T0(s);
3978
        break;
3979
    case 0x58 ... 0x5f: /* pop */
3980
        if (CODE64(s)) {
3981
            ot = dflag ? OT_QUAD : OT_WORD;
3982
        } else {
3983
            ot = dflag + OT_WORD;
3984
        }
3985
        gen_pop_T0(s);
3986
        /* NOTE: order is important for pop %sp */
3987
        gen_pop_update(s);
3988
        gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
3989
        break;
3990
    case 0x60: /* pusha */
3991
        if (CODE64(s))
3992
            goto illegal_op;
3993
        gen_pusha(s);
3994
        break;
3995
    case 0x61: /* popa */
3996
        if (CODE64(s))
3997
            goto illegal_op;
3998
        gen_popa(s);
3999
        break;
4000
    case 0x68: /* push Iv */
4001
    case 0x6a:
4002
        if (CODE64(s)) {
4003
            ot = dflag ? OT_QUAD : OT_WORD;
4004
        } else {
4005
            ot = dflag + OT_WORD;
4006
        }
4007
        if (b == 0x68)
4008
            val = insn_get(s, ot);
4009
        else
4010
            val = (int8_t)insn_get(s, OT_BYTE);
4011
        gen_op_movl_T0_im(val);
4012
        gen_push_T0(s);
4013
        break;
4014
    case 0x8f: /* pop Ev */
4015
        if (CODE64(s)) {
4016
            ot = dflag ? OT_QUAD : OT_WORD;
4017
        } else {
4018
            ot = dflag + OT_WORD;
4019
        }
4020
        modrm = ldub_code(s->pc++);
4021
        mod = (modrm >> 6) & 3;
4022
        gen_pop_T0(s);
4023
        if (mod == 3) {
4024
            /* NOTE: order is important for pop %sp */
4025
            gen_pop_update(s);
4026
            rm = (modrm & 7) | REX_B(s);
4027
            gen_op_mov_reg_T0(ot, rm);
4028
        } else {
4029
            /* NOTE: order is important too for MMU exceptions */
4030
            s->popl_esp_hack = 1 << ot;
4031
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4032
            s->popl_esp_hack = 0;
4033
            gen_pop_update(s);
4034
        }
4035
        break;
4036
    case 0xc8: /* enter */
4037
        {
4038
            int level;
4039
            val = lduw_code(s->pc);
4040
            s->pc += 2;
4041
            level = ldub_code(s->pc++);
4042
            gen_enter(s, val, level);
4043
        }
4044
        break;
4045
    case 0xc9: /* leave */
4046
        /* XXX: exception not precise (ESP is updated before potential exception) */
4047
        if (CODE64(s)) {
4048
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4049
            gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4050
        } else if (s->ss32) {
4051
            gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4052
            gen_op_mov_reg_T0(OT_LONG, R_ESP);
4053
        } else {
4054
            gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4055
            gen_op_mov_reg_T0(OT_WORD, R_ESP);
4056
        }
4057
        gen_pop_T0(s);
4058
        if (CODE64(s)) {
4059
            ot = dflag ? OT_QUAD : OT_WORD;
4060
        } else {
4061
            ot = dflag + OT_WORD;
4062
        }
4063
        gen_op_mov_reg_T0(ot, R_EBP);
4064
        gen_pop_update(s);
4065
        break;
4066
    case 0x06: /* push es */
4067
    case 0x0e: /* push cs */
4068
    case 0x16: /* push ss */
4069
    case 0x1e: /* push ds */
4070
        if (CODE64(s))
4071
            goto illegal_op;
4072
        gen_op_movl_T0_seg(b >> 3);
4073
        gen_push_T0(s);
4074
        break;
4075
    case 0x1a0: /* push fs */
4076
    case 0x1a8: /* push gs */
4077
        gen_op_movl_T0_seg((b >> 3) & 7);
4078
        gen_push_T0(s);
4079
        break;
4080
    case 0x07: /* pop es */
4081
    case 0x17: /* pop ss */
4082
    case 0x1f: /* pop ds */
4083
        if (CODE64(s))
4084
            goto illegal_op;
4085
        reg = b >> 3;
4086
        gen_pop_T0(s);
4087
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4088
        gen_pop_update(s);
4089
        if (reg == R_SS) {
4090
            /* if reg == SS, inhibit interrupts/trace. */
4091
            /* If several instructions disable interrupts, only the
4092
               _first_ does it */
4093
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4094
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
4095
            s->tf = 0;
4096
        }
4097
        if (s->is_jmp) {
4098
            gen_jmp_im(s->pc - s->cs_base);
4099
            gen_eob(s);
4100
        }
4101
        break;
4102
    case 0x1a1: /* pop fs */
4103
    case 0x1a9: /* pop gs */
4104
        gen_pop_T0(s);
4105
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4106
        gen_pop_update(s);
4107
        if (s->is_jmp) {
4108
            gen_jmp_im(s->pc - s->cs_base);
4109
            gen_eob(s);
4110
        }
4111
        break;
4112

    
4113
        /**************************/
4114
        /* mov */
4115
    case 0x88:
4116
    case 0x89: /* mov Gv, Ev */
4117
        if ((b & 1) == 0)
4118
            ot = OT_BYTE;
4119
        else
4120
            ot = dflag + OT_WORD;
4121
        modrm = ldub_code(s->pc++);
4122
        reg = ((modrm >> 3) & 7) | rex_r;
4123

    
4124
        /* generate a generic store */
4125
        gen_ldst_modrm(s, modrm, ot, reg, 1);
4126
        break;
4127
    case 0xc6:
4128
    case 0xc7: /* mov Ev, Iv */
4129
        if ((b & 1) == 0)
4130
            ot = OT_BYTE;
4131
        else
4132
            ot = dflag + OT_WORD;
4133
        modrm = ldub_code(s->pc++);
4134
        mod = (modrm >> 6) & 3;
4135
        if (mod != 3) {
4136
            s->rip_offset = insn_const_size(ot);
4137
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4138
        }
4139
        val = insn_get(s, ot);
4140
        gen_op_movl_T0_im(val);
4141
        if (mod != 3)
4142
            gen_op_st_T0_A0(ot + s->mem_index);
4143
        else
4144
            gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4145
        break;
4146
    case 0x8a:
4147
    case 0x8b: /* mov Ev, Gv */
4148
        if ((b & 1) == 0)
4149
            ot = OT_BYTE;
4150
        else
4151
            ot = OT_WORD + dflag;
4152
        modrm = ldub_code(s->pc++);
4153
        reg = ((modrm >> 3) & 7) | rex_r;
4154

    
4155
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4156
        gen_op_mov_reg_T0(ot, reg);
4157
        break;
4158
    case 0x8e: /* mov seg, Gv */
4159
        modrm = ldub_code(s->pc++);
4160
        reg = (modrm >> 3) & 7;
4161
        if (reg >= 6 || reg == R_CS)
4162
            goto illegal_op;
4163
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4164
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4165
        if (reg == R_SS) {
4166
            /* if reg == SS, inhibit interrupts/trace */
4167
            /* If several instructions disable interrupts, only the
4168
               _first_ does it */
4169
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4170
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
4171
            s->tf = 0;
4172
        }
4173
        if (s->is_jmp) {
4174
            gen_jmp_im(s->pc - s->cs_base);
4175
            gen_eob(s);
4176
        }
4177
        break;
4178
    case 0x8c: /* mov Gv, seg */
4179
        modrm = ldub_code(s->pc++);
4180
        reg = (modrm >> 3) & 7;
4181
        mod = (modrm >> 6) & 3;
4182
        if (reg >= 6)
4183
            goto illegal_op;
4184
        gen_op_movl_T0_seg(reg);
4185
        if (mod == 3)
4186
            ot = OT_WORD + dflag;
4187
        else
4188
            ot = OT_WORD;
4189
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4190
        break;
4191

    
4192
    case 0x1b6: /* movzbS Gv, Eb */
4193
    case 0x1b7: /* movzwS Gv, Eb */
4194
    case 0x1be: /* movsbS Gv, Eb */
4195
    case 0x1bf: /* movswS Gv, Eb */
4196
        {
4197
            int d_ot;
4198
            /* d_ot is the size of destination */
4199
            d_ot = dflag + OT_WORD;
4200
            /* ot is the size of source */
4201
            ot = (b & 1) + OT_BYTE;
4202
            modrm = ldub_code(s->pc++);
4203
            reg = ((modrm >> 3) & 7) | rex_r;
4204
            mod = (modrm >> 6) & 3;
4205
            rm = (modrm & 7) | REX_B(s);
4206

    
4207
            if (mod == 3) {
4208
                gen_op_mov_TN_reg(ot, 0, rm);
4209
                switch(ot | (b & 8)) {
4210
                case OT_BYTE:
4211
                    gen_op_movzbl_T0_T0();
4212
                    break;
4213
                case OT_BYTE | 8:
4214
                    gen_op_movsbl_T0_T0();
4215
                    break;
4216
                case OT_WORD:
4217
                    gen_op_movzwl_T0_T0();
4218
                    break;
4219
                default:
4220
                case OT_WORD | 8:
4221
                    gen_op_movswl_T0_T0();
4222
                    break;
4223
                }
4224
                gen_op_mov_reg_T0(d_ot, reg);
4225
            } else {
4226
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4227
                if (b & 8) {
4228
                    gen_op_lds_T0_A0(ot + s->mem_index);
4229
                } else {
4230
                    gen_op_ldu_T0_A0(ot + s->mem_index);
4231
                }
4232
                gen_op_mov_reg_T0(d_ot, reg);
4233
            }
4234
        }
4235
        break;
4236

    
4237
    case 0x8d: /* lea */
4238
        ot = dflag + OT_WORD;
4239
        modrm = ldub_code(s->pc++);
4240
        mod = (modrm >> 6) & 3;
4241
        if (mod == 3)
4242
            goto illegal_op;
4243
        reg = ((modrm >> 3) & 7) | rex_r;
4244
        /* we must ensure that no segment is added */
4245
        s->override = -1;
4246
        val = s->addseg;
4247
        s->addseg = 0;
4248
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4249
        s->addseg = val;
4250
        gen_op_mov_reg_A0(ot - OT_WORD, reg);
4251
        break;
4252

    
4253
    case 0xa0: /* mov EAX, Ov */
4254
    case 0xa1:
4255
    case 0xa2: /* mov Ov, EAX */
4256
    case 0xa3:
4257
        {
4258
            target_ulong offset_addr;
4259

    
4260
            if ((b & 1) == 0)
4261
                ot = OT_BYTE;
4262
            else
4263
                ot = dflag + OT_WORD;
4264
#ifdef TARGET_X86_64
4265
            if (s->aflag == 2) {
4266
                offset_addr = ldq_code(s->pc);
4267
                s->pc += 8;
4268
                gen_op_movq_A0_im(offset_addr);
4269
            } else
4270
#endif
4271
            {
4272
                if (s->aflag) {
4273
                    offset_addr = insn_get(s, OT_LONG);
4274
                } else {
4275
                    offset_addr = insn_get(s, OT_WORD);
4276
                }
4277
                gen_op_movl_A0_im(offset_addr);
4278
            }
4279
            gen_add_A0_ds_seg(s);
4280
            if ((b & 2) == 0) {
4281
                gen_op_ld_T0_A0(ot + s->mem_index);
4282
                gen_op_mov_reg_T0(ot, R_EAX);
4283
            } else {
4284
                gen_op_mov_TN_reg(ot, 0, R_EAX);
4285
                gen_op_st_T0_A0(ot + s->mem_index);
4286
            }
4287
        }
4288
        break;
4289
    case 0xd7: /* xlat */
4290
#ifdef TARGET_X86_64
4291
        if (s->aflag == 2) {
4292
            gen_op_movq_A0_reg(R_EBX);
4293
            gen_op_addq_A0_AL();
4294
        } else
4295
#endif
4296
        {
4297
            gen_op_movl_A0_reg(R_EBX);
4298
            gen_op_addl_A0_AL();
4299
            if (s->aflag == 0)
4300
                gen_op_andl_A0_ffff();
4301
        }
4302
        gen_add_A0_ds_seg(s);
4303
        gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4304
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4305
        break;
4306
    case 0xb0 ... 0xb7: /* mov R, Ib */
4307
        val = insn_get(s, OT_BYTE);
4308
        gen_op_movl_T0_im(val);
4309
        gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4310
        break;
4311
    case 0xb8 ... 0xbf: /* mov R, Iv */
4312
#ifdef TARGET_X86_64
4313
        if (dflag == 2) {
4314
            uint64_t tmp;
4315
            /* 64 bit case */
4316
            tmp = ldq_code(s->pc);
4317
            s->pc += 8;
4318
            reg = (b & 7) | REX_B(s);
4319
            gen_movtl_T0_im(tmp);
4320
            gen_op_mov_reg_T0(OT_QUAD, reg);
4321
        } else
4322
#endif
4323
        {
4324
            ot = dflag ? OT_LONG : OT_WORD;
4325
            val = insn_get(s, ot);
4326
            reg = (b & 7) | REX_B(s);
4327
            gen_op_movl_T0_im(val);
4328
            gen_op_mov_reg_T0(ot, reg);
4329
        }
4330
        break;
4331

    
4332
    case 0x91 ... 0x97: /* xchg R, EAX */
4333
        ot = dflag + OT_WORD;
4334
        reg = (b & 7) | REX_B(s);
4335
        rm = R_EAX;
4336
        goto do_xchg_reg;
4337
    case 0x86:
4338
    case 0x87: /* xchg Ev, Gv */
4339
        if ((b & 1) == 0)
4340
            ot = OT_BYTE;
4341
        else
4342
            ot = dflag + OT_WORD;
4343
        modrm = ldub_code(s->pc++);
4344
        reg = ((modrm >> 3) & 7) | rex_r;
4345
        mod = (modrm >> 6) & 3;
4346
        if (mod == 3) {
4347
            rm = (modrm & 7) | REX_B(s);
4348
        do_xchg_reg:
4349
            gen_op_mov_TN_reg(ot, 0, reg);
4350
            gen_op_mov_TN_reg(ot, 1, rm);
4351
            gen_op_mov_reg_T0(ot, rm);
4352
            gen_op_mov_reg_T1(ot, reg);
4353
        } else {
4354
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4355
            gen_op_mov_TN_reg(ot, 0, reg);
4356
            /* for xchg, lock is implicit */
4357
            if (!(prefixes & PREFIX_LOCK))
4358
                gen_op_lock();
4359
            gen_op_ld_T1_A0(ot + s->mem_index);
4360
            gen_op_st_T0_A0(ot + s->mem_index);
4361
            if (!(prefixes & PREFIX_LOCK))
4362
                gen_op_unlock();
4363
            gen_op_mov_reg_T1(ot, reg);
4364
        }
4365
        break;
4366
    case 0xc4: /* les Gv */
4367
        if (CODE64(s))
4368
            goto illegal_op;
4369
        op = R_ES;
4370
        goto do_lxx;
4371
    case 0xc5: /* lds Gv */
4372
        if (CODE64(s))
4373
            goto illegal_op;
4374
        op = R_DS;
4375
        goto do_lxx;
4376
    case 0x1b2: /* lss Gv */
4377
        op = R_SS;
4378
        goto do_lxx;
4379
    case 0x1b4: /* lfs Gv */
4380
        op = R_FS;
4381
        goto do_lxx;
4382
    case 0x1b5: /* lgs Gv */
4383
        op = R_GS;
4384
    do_lxx:
4385
        ot = dflag ? OT_LONG : OT_WORD;
4386
        modrm = ldub_code(s->pc++);
4387
        reg = ((modrm >> 3) & 7) | rex_r;
4388
        mod = (modrm >> 6) & 3;
4389
        if (mod == 3)
4390
            goto illegal_op;
4391
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4392
        gen_op_ld_T1_A0(ot + s->mem_index);
4393
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4394
        /* load the segment first to handle exceptions properly */
4395
        gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4396
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4397
        /* then put the data */
4398
        gen_op_mov_reg_T1(ot, reg);
4399
        if (s->is_jmp) {
4400
            gen_jmp_im(s->pc - s->cs_base);
4401
            gen_eob(s);
4402
        }
4403
        break;
4404

    
4405
        /************************/
4406
        /* shifts */
4407
    case 0xc0:
4408
    case 0xc1:
4409
        /* shift Ev,Ib */
4410
        shift = 2;
4411
    grp2:
4412
        {
4413
            if ((b & 1) == 0)
4414
                ot = OT_BYTE;
4415
            else
4416
                ot = dflag + OT_WORD;
4417

    
4418
            modrm = ldub_code(s->pc++);
4419
            mod = (modrm >> 6) & 3;
4420
            op = (modrm >> 3) & 7;
4421

    
4422
            if (mod != 3) {
4423
                if (shift == 2) {
4424
                    s->rip_offset = 1;
4425
                }
4426
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4427
                opreg = OR_TMP0;
4428
            } else {
4429
                opreg = (modrm & 7) | REX_B(s);
4430
            }
4431

    
4432
            /* simpler op */
4433
            if (shift == 0) {
4434
                gen_shift(s, op, ot, opreg, OR_ECX);
4435
            } else {
4436
                if (shift == 2) {
4437
                    shift = ldub_code(s->pc++);
4438
                }
4439
                gen_shifti(s, op, ot, opreg, shift);
4440
            }
4441
        }
4442
        break;
4443
    case 0xd0:
4444
    case 0xd1:
4445
        /* shift Ev,1 */
4446
        shift = 1;
4447
        goto grp2;
4448
    case 0xd2:
4449
    case 0xd3:
4450
        /* shift Ev,cl */
4451
        shift = 0;
4452
        goto grp2;
4453

    
4454
    case 0x1a4: /* shld imm */
4455
        op = 0;
4456
        shift = 1;
4457
        goto do_shiftd;
4458
    case 0x1a5: /* shld cl */
4459
        op = 0;
4460
        shift = 0;
4461
        goto do_shiftd;
4462
    case 0x1ac: /* shrd imm */
4463
        op = 1;
4464
        shift = 1;
4465
        goto do_shiftd;
4466
    case 0x1ad: /* shrd cl */
4467
        op = 1;
4468
        shift = 0;
4469
    do_shiftd:
4470
        ot = dflag + OT_WORD;
4471
        modrm = ldub_code(s->pc++);
4472
        mod = (modrm >> 6) & 3;
4473
        rm = (modrm & 7) | REX_B(s);
4474
        reg = ((modrm >> 3) & 7) | rex_r;
4475

    
4476
        if (mod != 3) {
4477
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4478
            gen_op_ld_T0_A0(ot + s->mem_index);
4479
        } else {
4480
            gen_op_mov_TN_reg(ot, 0, rm);
4481
        }
4482
        gen_op_mov_TN_reg(ot, 1, reg);
4483

    
4484
        if (shift) {
4485
            val = ldub_code(s->pc++);
4486
            if (ot == OT_QUAD)
4487
                val &= 0x3f;
4488
            else
4489
                val &= 0x1f;
4490
            if (val) {
4491
                if (mod == 3)
4492
                    gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4493
                else
4494
                    gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4495
                if (op == 0 && ot != OT_WORD)
4496
                    s->cc_op = CC_OP_SHLB + ot;
4497
                else
4498
                    s->cc_op = CC_OP_SARB + ot;
4499
            }
4500
        } else {
4501
            if (s->cc_op != CC_OP_DYNAMIC)
4502
                gen_op_set_cc_op(s->cc_op);
4503
            if (mod == 3)
4504
                gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4505
            else
4506
                gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4507
            s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4508
        }
4509
        if (mod == 3) {
4510
            gen_op_mov_reg_T0(ot, rm);
4511
        }
4512
        break;
4513

    
4514
        /************************/
4515
        /* floats */
4516
    case 0xd8 ... 0xdf:
4517
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4518
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4519
            /* XXX: what to do if illegal op ? */
4520
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4521
            break;
4522
        }
4523
        modrm = ldub_code(s->pc++);
4524
        mod = (modrm >> 6) & 3;
4525
        rm = modrm & 7;
4526
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4527
        if (mod != 3) {
4528
            /* memory op */
4529
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4530
            switch(op) {
4531
            case 0x00 ... 0x07: /* fxxxs */
4532
            case 0x10 ... 0x17: /* fixxxl */
4533
            case 0x20 ... 0x27: /* fxxxl */
4534
            case 0x30 ... 0x37: /* fixxx */
4535
                {
4536
                    int op1;
4537
                    op1 = op & 7;
4538

    
4539
                    switch(op >> 4) {
4540
                    case 0:
4541
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4542
                        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4543
                        tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2);
4544
                        break;
4545
                    case 1:
4546
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4547
                        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4548
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2);
4549
                        break;
4550
                    case 2:
4551
                        tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0, 
4552
                                          (s->mem_index >> 2) - 1);
4553
                        tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1);
4554
                        break;
4555
                    case 3:
4556
                    default:
4557
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4558
                        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4559
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2);
4560
                        break;
4561
                    }
4562

    
4563
                    tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
4564
                    if (op1 == 3) {
4565
                        /* fcomp needs pop */
4566
                        tcg_gen_helper_0_0(helper_fpop);
4567
                    }
4568
                }
4569
                break;
4570
            case 0x08: /* flds */
4571
            case 0x0a: /* fsts */
4572
            case 0x0b: /* fstps */
4573
            case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4574
            case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4575
            case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4576
                switch(op & 7) {
4577
                case 0:
4578
                    switch(op >> 4) {
4579
                    case 0:
4580
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4581
                        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4582
                        tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2);
4583
                        break;
4584
                    case 1:
4585
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4586
                        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4587
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2);
4588
                        break;
4589
                    case 2:
4590
                        tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0, 
4591
                                          (s->mem_index >> 2) - 1);
4592
                        tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1);
4593
                        break;
4594
                    case 3:
4595
                    default:
4596
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4597
                        tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4598
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2);
4599
                        break;
4600
                    }
4601
                    break;
4602
                case 1:
4603
                    /* XXX: the corresponding CPUID bit must be tested ! */
4604
                    switch(op >> 4) {
4605
                    case 1:
4606
                        tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2);
4607
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4608
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4609
                        break;
4610
                    case 2:
4611
                        tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1);
4612
                        tcg_gen_qemu_st64(cpu_tmp1, cpu_A0, 
4613
                                          (s->mem_index >> 2) - 1);
4614
                        break;
4615
                    case 3:
4616
                    default:
4617
                        tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2);
4618
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4619
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
4620
                        break;
4621
                    }
4622
                    tcg_gen_helper_0_0(helper_fpop);
4623
                    break;
4624
                default:
4625
                    switch(op >> 4) {
4626
                    case 0:
4627
                        tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2);
4628
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4629
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4630
                        break;
4631
                    case 1:
4632
                        tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2);
4633
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4634
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4635
                        break;
4636
                    case 2:
4637
                        tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1);
4638
                        tcg_gen_qemu_st64(cpu_tmp1, cpu_A0, 
4639
                                          (s->mem_index >> 2) - 1);
4640
                        break;
4641
                    case 3:
4642
                    default:
4643
                        tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2);
4644
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4645
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
4646
                        break;
4647
                    }
4648
                    if ((op & 7) == 3)
4649
                        tcg_gen_helper_0_0(helper_fpop);
4650
                    break;
4651
                }
4652
                break;
4653
            case 0x0c: /* fldenv mem */
4654
                if (s->cc_op != CC_OP_DYNAMIC)
4655
                    gen_op_set_cc_op(s->cc_op);
4656
                gen_jmp_im(pc_start - s->cs_base);
4657
                tcg_gen_helper_0_2(helper_fldenv, 
4658
                                   cpu_A0, tcg_const_i32(s->dflag));
4659
                break;
4660
            case 0x0d: /* fldcw mem */
4661
                gen_op_ld_T0_A0(OT_WORD + s->mem_index);
4662
                tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
4663
                tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2);
4664
                break;
4665
            case 0x0e: /* fnstenv mem */
4666
                if (s->cc_op != CC_OP_DYNAMIC)
4667
                    gen_op_set_cc_op(s->cc_op);
4668
                gen_jmp_im(pc_start - s->cs_base);
4669
                tcg_gen_helper_0_2(helper_fstenv,
4670
                                   cpu_A0, tcg_const_i32(s->dflag));
4671
                break;
4672
            case 0x0f: /* fnstcw mem */
4673
                tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2);
4674
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4675
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
4676
                break;
4677
            case 0x1d: /* fldt mem */
4678
                if (s->cc_op != CC_OP_DYNAMIC)
4679
                    gen_op_set_cc_op(s->cc_op);
4680
                gen_jmp_im(pc_start - s->cs_base);
4681
                tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
4682
                break;
4683
            case 0x1f: /* fstpt mem */
4684
                if (s->cc_op != CC_OP_DYNAMIC)
4685
                    gen_op_set_cc_op(s->cc_op);
4686
                gen_jmp_im(pc_start - s->cs_base);
4687
                tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
4688
                tcg_gen_helper_0_0(helper_fpop);
4689
                break;
4690
            case 0x2c: /* frstor mem */
4691
                if (s->cc_op != CC_OP_DYNAMIC)
4692
                    gen_op_set_cc_op(s->cc_op);
4693
                gen_jmp_im(pc_start - s->cs_base);
4694
                tcg_gen_helper_0_2(helper_frstor,
4695
                                   cpu_A0, tcg_const_i32(s->dflag));
4696
                break;
4697
            case 0x2e: /* fnsave mem */
4698
                if (s->cc_op != CC_OP_DYNAMIC)
4699
                    gen_op_set_cc_op(s->cc_op);
4700
                gen_jmp_im(pc_start - s->cs_base);
4701
                tcg_gen_helper_0_2(helper_fsave,
4702
                                   cpu_A0, tcg_const_i32(s->dflag));
4703
                break;
4704
            case 0x2f: /* fnstsw mem */
4705
                tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2);
4706
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4707
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
4708
                break;
4709
            case 0x3c: /* fbld */
4710
                if (s->cc_op != CC_OP_DYNAMIC)
4711
                    gen_op_set_cc_op(s->cc_op);
4712
                gen_jmp_im(pc_start - s->cs_base);
4713
                tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
4714
                break;
4715
            case 0x3e: /* fbstp */
4716
                if (s->cc_op != CC_OP_DYNAMIC)
4717
                    gen_op_set_cc_op(s->cc_op);
4718
                gen_jmp_im(pc_start - s->cs_base);
4719
                tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
4720
                tcg_gen_helper_0_0(helper_fpop);
4721
                break;
4722
            case 0x3d: /* fildll */
4723
                tcg_gen_qemu_ld64(cpu_tmp1, cpu_A0, 
4724
                                  (s->mem_index >> 2) - 1);
4725
                tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1);
4726
                break;
4727
            case 0x3f: /* fistpll */
4728
                tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1);
4729
                tcg_gen_qemu_st64(cpu_tmp1, cpu_A0, 
4730
                                  (s->mem_index >> 2) - 1);
4731
                tcg_gen_helper_0_0(helper_fpop);
4732
                break;
4733
            default:
4734
                goto illegal_op;
4735
            }
4736
        } else {
4737
            /* register float ops */
4738
            opreg = rm;
4739

    
4740
            switch(op) {
4741
            case 0x08: /* fld sti */
4742
                tcg_gen_helper_0_0(helper_fpush);
4743
                tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
4744
                break;
4745
            case 0x09: /* fxchg sti */
4746
            case 0x29: /* fxchg4 sti, undocumented op */
4747
            case 0x39: /* fxchg7 sti, undocumented op */
4748
                tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
4749
                break;
4750
            case 0x0a: /* grp d9/2 */
4751
                switch(rm) {
4752
                case 0: /* fnop */
4753
                    /* check exceptions (FreeBSD FPU probe) */
4754
                    if (s->cc_op != CC_OP_DYNAMIC)
4755
                        gen_op_set_cc_op(s->cc_op);
4756
                    gen_jmp_im(pc_start - s->cs_base);
4757
                    tcg_gen_helper_0_0(helper_fwait);
4758
                    break;
4759
                default:
4760
                    goto illegal_op;
4761
                }
4762
                break;
4763
            case 0x0c: /* grp d9/4 */
4764
                switch(rm) {
4765
                case 0: /* fchs */
4766
                    tcg_gen_helper_0_0(helper_fchs_ST0);
4767
                    break;
4768
                case 1: /* fabs */
4769
                    tcg_gen_helper_0_0(helper_fabs_ST0);
4770
                    break;
4771
                case 4: /* ftst */
4772
                    tcg_gen_helper_0_0(helper_fldz_FT0);
4773
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
4774
                    break;
4775
                case 5: /* fxam */
4776
                    tcg_gen_helper_0_0(helper_fxam_ST0);
4777
                    break;
4778
                default:
4779
                    goto illegal_op;
4780
                }
4781
                break;
4782
            case 0x0d: /* grp d9/5 */
4783
                {
4784
                    switch(rm) {
4785
                    case 0:
4786
                        tcg_gen_helper_0_0(helper_fpush);
4787
                        tcg_gen_helper_0_0(helper_fld1_ST0);
4788
                        break;
4789
                    case 1:
4790
                        tcg_gen_helper_0_0(helper_fpush);
4791
                        tcg_gen_helper_0_0(helper_fldl2t_ST0);
4792
                        break;
4793
                    case 2:
4794
                        tcg_gen_helper_0_0(helper_fpush);
4795
                        tcg_gen_helper_0_0(helper_fldl2e_ST0);
4796
                        break;
4797
                    case 3:
4798
                        tcg_gen_helper_0_0(helper_fpush);
4799
                        tcg_gen_helper_0_0(helper_fldpi_ST0);
4800
                        break;
4801
                    case 4:
4802
                        tcg_gen_helper_0_0(helper_fpush);
4803
                        tcg_gen_helper_0_0(helper_fldlg2_ST0);
4804
                        break;
4805
                    case 5:
4806
                        tcg_gen_helper_0_0(helper_fpush);
4807
                        tcg_gen_helper_0_0(helper_fldln2_ST0);
4808
                        break;
4809
                    case 6:
4810
                        tcg_gen_helper_0_0(helper_fpush);
4811
                        tcg_gen_helper_0_0(helper_fldz_ST0);
4812
                        break;
4813
                    default:
4814
                        goto illegal_op;
4815
                    }
4816
                }
4817
                break;
4818
            case 0x0e: /* grp d9/6 */
4819
                switch(rm) {
4820
                case 0: /* f2xm1 */
4821
                    tcg_gen_helper_0_0(helper_f2xm1);
4822
                    break;
4823
                case 1: /* fyl2x */
4824
                    tcg_gen_helper_0_0(helper_fyl2x);
4825
                    break;
4826
                case 2: /* fptan */
4827
                    tcg_gen_helper_0_0(helper_fptan);
4828
                    break;
4829
                case 3: /* fpatan */
4830
                    tcg_gen_helper_0_0(helper_fpatan);
4831
                    break;
4832
                case 4: /* fxtract */
4833
                    tcg_gen_helper_0_0(helper_fxtract);
4834
                    break;
4835
                case 5: /* fprem1 */
4836
                    tcg_gen_helper_0_0(helper_fprem1);
4837
                    break;
4838
                case 6: /* fdecstp */
4839
                    tcg_gen_helper_0_0(helper_fdecstp);
4840
                    break;
4841
                default:
4842
                case 7: /* fincstp */
4843
                    tcg_gen_helper_0_0(helper_fincstp);
4844
                    break;
4845
                }
4846
                break;
4847
            case 0x0f: /* grp d9/7 */
4848
                switch(rm) {
4849
                case 0: /* fprem */
4850
                    tcg_gen_helper_0_0(helper_fprem);
4851
                    break;
4852
                case 1: /* fyl2xp1 */
4853
                    tcg_gen_helper_0_0(helper_fyl2xp1);
4854
                    break;
4855
                case 2: /* fsqrt */
4856
                    tcg_gen_helper_0_0(helper_fsqrt);
4857
                    break;
4858
                case 3: /* fsincos */
4859
                    tcg_gen_helper_0_0(helper_fsincos);
4860
                    break;
4861
                case 5: /* fscale */
4862
                    tcg_gen_helper_0_0(helper_fscale);
4863
                    break;
4864
                case 4: /* frndint */
4865
                    tcg_gen_helper_0_0(helper_frndint);
4866
                    break;
4867
                case 6: /* fsin */
4868
                    tcg_gen_helper_0_0(helper_fsin);
4869
                    break;
4870
                default:
4871
                case 7: /* fcos */
4872
                    tcg_gen_helper_0_0(helper_fcos);
4873
                    break;
4874
                }
4875
                break;
4876
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4877
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4878
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4879
                {
4880
                    int op1;
4881

    
4882
                    op1 = op & 7;
4883
                    if (op >= 0x20) {
4884
                        tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
4885
                        if (op >= 0x30)
4886
                            tcg_gen_helper_0_0(helper_fpop);
4887
                    } else {
4888
                        tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4889
                        tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
4890
                    }
4891
                }
4892
                break;
4893
            case 0x02: /* fcom */
4894
            case 0x22: /* fcom2, undocumented op */
4895
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4896
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
4897
                break;
4898
            case 0x03: /* fcomp */
4899
            case 0x23: /* fcomp3, undocumented op */
4900
            case 0x32: /* fcomp5, undocumented op */
4901
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4902
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
4903
                tcg_gen_helper_0_0(helper_fpop);
4904
                break;
4905
            case 0x15: /* da/5 */
4906
                switch(rm) {
4907
                case 1: /* fucompp */
4908
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
4909
                    tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
4910
                    tcg_gen_helper_0_0(helper_fpop);
4911
                    tcg_gen_helper_0_0(helper_fpop);
4912
                    break;
4913
                default:
4914
                    goto illegal_op;
4915
                }
4916
                break;
4917
            case 0x1c:
4918
                switch(rm) {
4919
                case 0: /* feni (287 only, just do nop here) */
4920
                    break;
4921
                case 1: /* fdisi (287 only, just do nop here) */
4922
                    break;
4923
                case 2: /* fclex */
4924
                    tcg_gen_helper_0_0(helper_fclex);
4925
                    break;
4926
                case 3: /* fninit */
4927
                    tcg_gen_helper_0_0(helper_fninit);
4928
                    break;
4929
                case 4: /* fsetpm (287 only, just do nop here) */
4930
                    break;
4931
                default:
4932
                    goto illegal_op;
4933
                }
4934
                break;
4935
            case 0x1d: /* fucomi */
4936
                if (s->cc_op != CC_OP_DYNAMIC)
4937
                    gen_op_set_cc_op(s->cc_op);
4938
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4939
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
4940
                gen_op_fcomi_dummy();
4941
                s->cc_op = CC_OP_EFLAGS;
4942
                break;
4943
            case 0x1e: /* fcomi */
4944
                if (s->cc_op != CC_OP_DYNAMIC)
4945
                    gen_op_set_cc_op(s->cc_op);
4946
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4947
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
4948
                gen_op_fcomi_dummy();
4949
                s->cc_op = CC_OP_EFLAGS;
4950
                break;
4951
            case 0x28: /* ffree sti */
4952
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
4953
                break;
4954
            case 0x2a: /* fst sti */
4955
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
4956
                break;
4957
            case 0x2b: /* fstp sti */
4958
            case 0x0b: /* fstp1 sti, undocumented op */
4959
            case 0x3a: /* fstp8 sti, undocumented op */
4960
            case 0x3b: /* fstp9 sti, undocumented op */
4961
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
4962
                tcg_gen_helper_0_0(helper_fpop);
4963
                break;
4964
            case 0x2c: /* fucom st(i) */
4965
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4966
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
4967
                break;
4968
            case 0x2d: /* fucomp st(i) */
4969
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
4970
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
4971
                tcg_gen_helper_0_0(helper_fpop);
4972
                break;
4973
            case 0x33: /* de/3 */
4974
                switch(rm) {
4975
                case 1: /* fcompp */
4976
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
4977
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
4978
                    tcg_gen_helper_0_0(helper_fpop);
4979
                    tcg_gen_helper_0_0(helper_fpop);
4980
                    break;
4981
                default:
4982
                    goto illegal_op;
4983
                }
4984
                break;
4985
            case 0x38: /* ffreep sti, undocumented op */
4986
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
4987
                tcg_gen_helper_0_0(helper_fpop);
4988
                break;
4989
            case 0x3c: /* df/4 */
4990
                switch(rm) {
4991
                case 0:
4992
                    tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2);
4993
                    tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2);
4994
                    gen_op_mov_reg_T0(OT_WORD, R_EAX);
4995
                    break;
4996
                default:
4997
                    goto illegal_op;
4998
                }
4999
                break;
5000
            case 0x3d: /* fucomip */
5001
                if (s->cc_op != CC_OP_DYNAMIC)
5002
                    gen_op_set_cc_op(s->cc_op);
5003
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5004
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5005
                tcg_gen_helper_0_0(helper_fpop);
5006
                gen_op_fcomi_dummy();
5007
                s->cc_op = CC_OP_EFLAGS;
5008
                break;
5009
            case 0x3e: /* fcomip */
5010
                if (s->cc_op != CC_OP_DYNAMIC)
5011
                    gen_op_set_cc_op(s->cc_op);
5012
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5013
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5014
                tcg_gen_helper_0_0(helper_fpop);
5015
                gen_op_fcomi_dummy();
5016
                s->cc_op = CC_OP_EFLAGS;
5017
                break;
5018
            case 0x10 ... 0x13: /* fcmovxx */
5019
            case 0x18 ... 0x1b:
5020
                {
5021
                    int op1, l1;
5022
                    const static uint8_t fcmov_cc[8] = {
5023
                        (JCC_B << 1),
5024
                        (JCC_Z << 1),
5025
                        (JCC_BE << 1),
5026
                        (JCC_P << 1),
5027
                    };
5028
                    op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
5029
                    gen_setcc(s, op1);
5030
                    l1 = gen_new_label();
5031
                    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), l1);
5032
                    tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
5033
                    gen_set_label(l1);
5034
                }
5035
                break;
5036
            default:
5037
                goto illegal_op;
5038
            }
5039
        }
5040
        break;
5041
        /************************/
5042
        /* string ops */
5043

    
5044
    case 0xa4: /* movsS */
5045
    case 0xa5:
5046
        if ((b & 1) == 0)
5047
            ot = OT_BYTE;
5048
        else
5049
            ot = dflag + OT_WORD;
5050

    
5051
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5052
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5053
        } else {
5054
            gen_movs(s, ot);
5055
        }
5056
        break;
5057

    
5058
    case 0xaa: /* stosS */
5059
    case 0xab:
5060
        if ((b & 1) == 0)
5061
            ot = OT_BYTE;
5062
        else
5063
            ot = dflag + OT_WORD;
5064

    
5065
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5066
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5067
        } else {
5068
            gen_stos(s, ot);
5069
        }
5070
        break;
5071
    case 0xac: /* lodsS */
5072
    case 0xad:
5073
        if ((b & 1) == 0)
5074
            ot = OT_BYTE;
5075
        else
5076
            ot = dflag + OT_WORD;
5077
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5078
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5079
        } else {
5080
            gen_lods(s, ot);
5081
        }
5082
        break;
5083
    case 0xae: /* scasS */
5084
    case 0xaf:
5085
        if ((b & 1) == 0)
5086
            ot = OT_BYTE;
5087
        else
5088
            ot = dflag + OT_WORD;
5089
        if (prefixes & PREFIX_REPNZ) {
5090
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5091
        } else if (prefixes & PREFIX_REPZ) {
5092
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5093
        } else {
5094
            gen_scas(s, ot);
5095
            s->cc_op = CC_OP_SUBB + ot;
5096
        }
5097
        break;
5098

    
5099
    case 0xa6: /* cmpsS */
5100
    case 0xa7:
5101
        if ((b & 1) == 0)
5102
            ot = OT_BYTE;
5103
        else
5104
            ot = dflag + OT_WORD;
5105
        if (prefixes & PREFIX_REPNZ) {
5106
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5107
        } else if (prefixes & PREFIX_REPZ) {
5108
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5109
        } else {
5110
            gen_cmps(s, ot);
5111
            s->cc_op = CC_OP_SUBB + ot;
5112
        }
5113
        break;
5114
    case 0x6c: /* insS */
5115
    case 0x6d:
5116
        if ((b & 1) == 0)
5117
            ot = OT_BYTE;
5118
        else
5119
            ot = dflag ? OT_LONG : OT_WORD;
5120
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
5121
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5122
        gen_op_andl_T0_ffff();
5123
        if (gen_svm_check_io(s, pc_start,
5124
                             SVM_IOIO_TYPE_MASK | (1 << (4+ot)) |
5125
                             svm_is_rep(prefixes) | 4 | (1 << (7+s->aflag))))
5126
            break;
5127
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5128
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5129
        } else {
5130
            gen_ins(s, ot);
5131
        }
5132
        break;
5133
    case 0x6e: /* outsS */
5134
    case 0x6f:
5135
        if ((b & 1) == 0)
5136
            ot = OT_BYTE;
5137
        else
5138
            ot = dflag ? OT_LONG : OT_WORD;
5139
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
5140
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5141
        gen_op_andl_T0_ffff();
5142
        if (gen_svm_check_io(s, pc_start,
5143
                             (1 << (4+ot)) | svm_is_rep(prefixes) |
5144
                             4 | (1 << (7+s->aflag))))
5145
            break;
5146
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5147
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5148
        } else {
5149
            gen_outs(s, ot);
5150
        }
5151
        break;
5152

    
5153
        /************************/
5154
        /* port I/O */
5155

    
5156
    case 0xe4:
5157
    case 0xe5:
5158
        if ((b & 1) == 0)
5159
            ot = OT_BYTE;
5160
        else
5161
            ot = dflag ? OT_LONG : OT_WORD;
5162
        val = ldub_code(s->pc++);
5163
        gen_op_movl_T0_im(val);
5164
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
5165
        if (gen_svm_check_io(s, pc_start,
5166
                             SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
5167
                             (1 << (4+ot))))
5168
            break;
5169
        gen_op_in[ot]();
5170
        gen_op_mov_reg_T1(ot, R_EAX);
5171
        break;
5172
    case 0xe6:
5173
    case 0xe7:
5174
        if ((b & 1) == 0)
5175
            ot = OT_BYTE;
5176
        else
5177
            ot = dflag ? OT_LONG : OT_WORD;
5178
        val = ldub_code(s->pc++);
5179
        gen_op_movl_T0_im(val);
5180
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
5181
        if (gen_svm_check_io(s, pc_start, svm_is_rep(prefixes) |
5182
                             (1 << (4+ot))))
5183
            break;
5184
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5185
        gen_op_out[ot]();
5186
        break;
5187
    case 0xec:
5188
    case 0xed:
5189
        if ((b & 1) == 0)
5190
            ot = OT_BYTE;
5191
        else
5192
            ot = dflag ? OT_LONG : OT_WORD;
5193
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5194
        gen_op_andl_T0_ffff();
5195
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
5196
        if (gen_svm_check_io(s, pc_start,
5197
                             SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
5198
                             (1 << (4+ot))))
5199
            break;
5200
        gen_op_in[ot]();
5201
        gen_op_mov_reg_T1(ot, R_EAX);
5202
        break;
5203
    case 0xee:
5204
    case 0xef:
5205
        if ((b & 1) == 0)
5206
            ot = OT_BYTE;
5207
        else
5208
            ot = dflag ? OT_LONG : OT_WORD;
5209
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5210
        gen_op_andl_T0_ffff();
5211
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
5212
        if (gen_svm_check_io(s, pc_start,
5213
                             svm_is_rep(prefixes) | (1 << (4+ot))))
5214
            break;
5215
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5216
        gen_op_out[ot]();
5217
        break;
5218

    
5219
        /************************/
5220
        /* control */
5221
    case 0xc2: /* ret im */
5222
        val = ldsw_code(s->pc);
5223
        s->pc += 2;
5224
        gen_pop_T0(s);
5225
        if (CODE64(s) && s->dflag)
5226
            s->dflag = 2;
5227
        gen_stack_update(s, val + (2 << s->dflag));
5228
        if (s->dflag == 0)
5229
            gen_op_andl_T0_ffff();
5230
        gen_op_jmp_T0();
5231
        gen_eob(s);
5232
        break;
5233
    case 0xc3: /* ret */
5234
        gen_pop_T0(s);
5235
        gen_pop_update(s);
5236
        if (s->dflag == 0)
5237
            gen_op_andl_T0_ffff();
5238
        gen_op_jmp_T0();
5239
        gen_eob(s);
5240
        break;
5241
    case 0xca: /* lret im */
5242
        val = ldsw_code(s->pc);
5243
        s->pc += 2;
5244
    do_lret:
5245
        if (s->pe && !s->vm86) {
5246
            if (s->cc_op != CC_OP_DYNAMIC)
5247
                gen_op_set_cc_op(s->cc_op);
5248
            gen_jmp_im(pc_start - s->cs_base);
5249
            gen_op_lret_protected(s->dflag, val);
5250
        } else {
5251
            gen_stack_A0(s);
5252
            /* pop offset */
5253
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5254
            if (s->dflag == 0)
5255
                gen_op_andl_T0_ffff();
5256
            /* NOTE: keeping EIP updated is not a problem in case of
5257
               exception */
5258
            gen_op_jmp_T0();
5259
            /* pop selector */
5260
            gen_op_addl_A0_im(2 << s->dflag);
5261
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5262
            gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5263
            /* add stack offset */
5264
            gen_stack_update(s, val + (4 << s->dflag));
5265
        }
5266
        gen_eob(s);
5267
        break;
5268
    case 0xcb: /* lret */
5269
        val = 0;
5270
        goto do_lret;
5271
    case 0xcf: /* iret */
5272
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5273
            break;
5274
        if (!s->pe) {
5275
            /* real mode */
5276
            gen_op_iret_real(s->dflag);
5277
            s->cc_op = CC_OP_EFLAGS;
5278
        } else if (s->vm86) {
5279
            if (s->iopl != 3) {
5280
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5281
            } else {
5282
                gen_op_iret_real(s->dflag);
5283
                s->cc_op = CC_OP_EFLAGS;
5284
            }
5285
        } else {
5286
            if (s->cc_op != CC_OP_DYNAMIC)
5287
                gen_op_set_cc_op(s->cc_op);
5288
            gen_jmp_im(pc_start - s->cs_base);
5289
            gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
5290
            s->cc_op = CC_OP_EFLAGS;
5291
        }
5292
        gen_eob(s);
5293
        break;
5294
    case 0xe8: /* call im */
5295
        {
5296
            if (dflag)
5297
                tval = (int32_t)insn_get(s, OT_LONG);
5298
            else
5299
                tval = (int16_t)insn_get(s, OT_WORD);
5300
            next_eip = s->pc - s->cs_base;
5301
            tval += next_eip;
5302
            if (s->dflag == 0)
5303
                tval &= 0xffff;
5304
            gen_movtl_T0_im(next_eip);
5305
            gen_push_T0(s);
5306
            gen_jmp(s, tval);
5307
        }
5308
        break;
5309
    case 0x9a: /* lcall im */
5310
        {
5311
            unsigned int selector, offset;
5312

    
5313
            if (CODE64(s))
5314
                goto illegal_op;
5315
            ot = dflag ? OT_LONG : OT_WORD;
5316
            offset = insn_get(s, ot);
5317
            selector = insn_get(s, OT_WORD);
5318

    
5319
            gen_op_movl_T0_im(selector);
5320
            gen_op_movl_T1_imu(offset);
5321
        }
5322
        goto do_lcall;
5323
    case 0xe9: /* jmp im */
5324
        if (dflag)
5325
            tval = (int32_t)insn_get(s, OT_LONG);
5326
        else
5327
            tval = (int16_t)insn_get(s, OT_WORD);
5328
        tval += s->pc - s->cs_base;
5329
        if (s->dflag == 0)
5330
            tval &= 0xffff;
5331
        gen_jmp(s, tval);
5332
        break;
5333
    case 0xea: /* ljmp im */
5334
        {
5335
            unsigned int selector, offset;
5336

    
5337
            if (CODE64(s))
5338
                goto illegal_op;
5339
            ot = dflag ? OT_LONG : OT_WORD;
5340
            offset = insn_get(s, ot);
5341
            selector = insn_get(s, OT_WORD);
5342

    
5343
            gen_op_movl_T0_im(selector);
5344
            gen_op_movl_T1_imu(offset);
5345
        }
5346
        goto do_ljmp;
5347
    case 0xeb: /* jmp Jb */
5348
        tval = (int8_t)insn_get(s, OT_BYTE);
5349
        tval += s->pc - s->cs_base;
5350
        if (s->dflag == 0)
5351
            tval &= 0xffff;
5352
        gen_jmp(s, tval);
5353
        break;
5354
    case 0x70 ... 0x7f: /* jcc Jb */
5355
        tval = (int8_t)insn_get(s, OT_BYTE);
5356
        goto do_jcc;
5357
    case 0x180 ... 0x18f: /* jcc Jv */
5358
        if (dflag) {
5359
            tval = (int32_t)insn_get(s, OT_LONG);
5360
        } else {
5361
            tval = (int16_t)insn_get(s, OT_WORD);
5362
        }
5363
    do_jcc:
5364
        next_eip = s->pc - s->cs_base;
5365
        tval += next_eip;
5366
        if (s->dflag == 0)
5367
            tval &= 0xffff;
5368
        gen_jcc(s, b, tval, next_eip);
5369
        break;
5370

    
5371
    case 0x190 ... 0x19f: /* setcc Gv */
5372
        modrm = ldub_code(s->pc++);
5373
        gen_setcc(s, b);
5374
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5375
        break;
5376
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
5377
        ot = dflag + OT_WORD;
5378
        modrm = ldub_code(s->pc++);
5379
        reg = ((modrm >> 3) & 7) | rex_r;
5380
        mod = (modrm >> 6) & 3;
5381
        gen_setcc(s, b);
5382
        if (mod != 3) {
5383
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5384
            gen_op_ld_T1_A0(ot + s->mem_index);
5385
        } else {
5386
            rm = (modrm & 7) | REX_B(s);
5387
            gen_op_mov_TN_reg(ot, 1, rm);
5388
        }
5389
        gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5390
        break;
5391

    
5392
        /************************/
5393
        /* flags */
5394
    case 0x9c: /* pushf */
5395
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5396
            break;
5397
        if (s->vm86 && s->iopl != 3) {
5398
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5399
        } else {
5400
            if (s->cc_op != CC_OP_DYNAMIC)
5401
                gen_op_set_cc_op(s->cc_op);
5402
            gen_op_movl_T0_eflags();
5403
            gen_push_T0(s);
5404
        }
5405
        break;
5406
    case 0x9d: /* popf */
5407
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5408
            break;
5409
        if (s->vm86 && s->iopl != 3) {
5410
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5411
        } else {
5412
            gen_pop_T0(s);
5413
            if (s->cpl == 0) {
5414
                if (s->dflag) {
5415
                    gen_op_movl_eflags_T0_cpl0();
5416
                } else {
5417
                    gen_op_movw_eflags_T0_cpl0();
5418
                }
5419
            } else {
5420
                if (s->cpl <= s->iopl) {
5421
                    if (s->dflag) {
5422
                        gen_op_movl_eflags_T0_io();
5423
                    } else {
5424
                        gen_op_movw_eflags_T0_io();
5425
                    }
5426
                } else {
5427
                    if (s->dflag) {
5428
                        gen_op_movl_eflags_T0();
5429
                    } else {
5430
                        gen_op_movw_eflags_T0();
5431
                    }
5432
                }
5433
            }
5434
            gen_pop_update(s);
5435
            s->cc_op = CC_OP_EFLAGS;
5436
            /* abort translation because TF flag may change */
5437
            gen_jmp_im(s->pc - s->cs_base);
5438
            gen_eob(s);
5439
        }
5440
        break;
5441
    case 0x9e: /* sahf */
5442
        if (CODE64(s))
5443
            goto illegal_op;
5444
        gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5445
        if (s->cc_op != CC_OP_DYNAMIC)
5446
            gen_op_set_cc_op(s->cc_op);
5447
        gen_op_movb_eflags_T0();
5448
        s->cc_op = CC_OP_EFLAGS;
5449
        break;
5450
    case 0x9f: /* lahf */
5451
        if (CODE64(s))
5452
            goto illegal_op;
5453
        if (s->cc_op != CC_OP_DYNAMIC)
5454
            gen_op_set_cc_op(s->cc_op);
5455
        gen_op_movl_T0_eflags();
5456
        gen_op_mov_reg_T0(OT_BYTE, R_AH);
5457
        break;
5458
    case 0xf5: /* cmc */
5459
        if (s->cc_op != CC_OP_DYNAMIC)
5460
            gen_op_set_cc_op(s->cc_op);
5461
        gen_op_cmc();
5462
        s->cc_op = CC_OP_EFLAGS;
5463
        break;
5464
    case 0xf8: /* clc */
5465
        if (s->cc_op != CC_OP_DYNAMIC)
5466
            gen_op_set_cc_op(s->cc_op);
5467
        gen_op_clc();
5468
        s->cc_op = CC_OP_EFLAGS;
5469
        break;
5470
    case 0xf9: /* stc */
5471
        if (s->cc_op != CC_OP_DYNAMIC)
5472
            gen_op_set_cc_op(s->cc_op);
5473
        gen_op_stc();
5474
        s->cc_op = CC_OP_EFLAGS;
5475
        break;
5476
    case 0xfc: /* cld */
5477
        tcg_gen_movi_i32(cpu_tmp2, 1);
5478
        tcg_gen_st_i32(cpu_tmp2, cpu_env, offsetof(CPUState, df));
5479
        break;
5480
    case 0xfd: /* std */
5481
        tcg_gen_movi_i32(cpu_tmp2, -1);
5482
        tcg_gen_st_i32(cpu_tmp2, cpu_env, offsetof(CPUState, df));
5483
        break;
5484

    
5485
        /************************/
5486
        /* bit operations */
5487
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
5488
        ot = dflag + OT_WORD;
5489
        modrm = ldub_code(s->pc++);
5490
        op = (modrm >> 3) & 7;
5491
        mod = (modrm >> 6) & 3;
5492
        rm = (modrm & 7) | REX_B(s);
5493
        if (mod != 3) {
5494
            s->rip_offset = 1;
5495
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5496
            gen_op_ld_T0_A0(ot + s->mem_index);
5497
        } else {
5498
            gen_op_mov_TN_reg(ot, 0, rm);
5499
        }
5500
        /* load shift */
5501
        val = ldub_code(s->pc++);
5502
        gen_op_movl_T1_im(val);
5503
        if (op < 4)
5504
            goto illegal_op;
5505
        op -= 4;
5506
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5507
        s->cc_op = CC_OP_SARB + ot;
5508
        if (op != 0) {
5509
            if (mod != 3)
5510
                gen_op_st_T0_A0(ot + s->mem_index);
5511
            else
5512
                gen_op_mov_reg_T0(ot, rm);
5513
            gen_op_update_bt_cc();
5514
        }
5515
        break;
5516
    case 0x1a3: /* bt Gv, Ev */
5517
        op = 0;
5518
        goto do_btx;
5519
    case 0x1ab: /* bts */
5520
        op = 1;
5521
        goto do_btx;
5522
    case 0x1b3: /* btr */
5523
        op = 2;
5524
        goto do_btx;
5525
    case 0x1bb: /* btc */
5526
        op = 3;
5527
    do_btx:
5528
        ot = dflag + OT_WORD;
5529
        modrm = ldub_code(s->pc++);
5530
        reg = ((modrm >> 3) & 7) | rex_r;
5531
        mod = (modrm >> 6) & 3;
5532
        rm = (modrm & 7) | REX_B(s);
5533
        gen_op_mov_TN_reg(OT_LONG, 1, reg);
5534
        if (mod != 3) {
5535
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5536
            /* specific case: we need to add a displacement */
5537
            gen_op_add_bit_A0_T1[ot - OT_WORD]();
5538
            gen_op_ld_T0_A0(ot + s->mem_index);
5539
        } else {
5540
            gen_op_mov_TN_reg(ot, 0, rm);
5541
        }
5542
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5543
        s->cc_op = CC_OP_SARB + ot;
5544
        if (op != 0) {
5545
            if (mod != 3)
5546
                gen_op_st_T0_A0(ot + s->mem_index);
5547
            else
5548
                gen_op_mov_reg_T0(ot, rm);
5549
            gen_op_update_bt_cc();
5550
        }
5551
        break;
5552
    case 0x1bc: /* bsf */
5553
    case 0x1bd: /* bsr */
5554
        ot = dflag + OT_WORD;
5555
        modrm = ldub_code(s->pc++);
5556
        reg = ((modrm >> 3) & 7) | rex_r;
5557
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5558
        /* NOTE: in order to handle the 0 case, we must load the
5559
           result. It could be optimized with a generated jump */
5560
        gen_op_mov_TN_reg(ot, 1, reg);
5561
        gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5562
        gen_op_mov_reg_T1(ot, reg);
5563
        s->cc_op = CC_OP_LOGICB + ot;
5564
        break;
5565
        /************************/
5566
        /* bcd */
5567
    case 0x27: /* daa */
5568
        if (CODE64(s))
5569
            goto illegal_op;
5570
        if (s->cc_op != CC_OP_DYNAMIC)
5571
            gen_op_set_cc_op(s->cc_op);
5572
        gen_op_daa();
5573
        s->cc_op = CC_OP_EFLAGS;
5574
        break;
5575
    case 0x2f: /* das */
5576
        if (CODE64(s))
5577
            goto illegal_op;
5578
        if (s->cc_op != CC_OP_DYNAMIC)
5579
            gen_op_set_cc_op(s->cc_op);
5580
        gen_op_das();
5581
        s->cc_op = CC_OP_EFLAGS;
5582
        break;
5583
    case 0x37: /* aaa */
5584
        if (CODE64(s))
5585
            goto illegal_op;
5586
        if (s->cc_op != CC_OP_DYNAMIC)
5587
            gen_op_set_cc_op(s->cc_op);
5588
        gen_op_aaa();
5589
        s->cc_op = CC_OP_EFLAGS;
5590
        break;
5591
    case 0x3f: /* aas */
5592
        if (CODE64(s))
5593
            goto illegal_op;
5594
        if (s->cc_op != CC_OP_DYNAMIC)
5595
            gen_op_set_cc_op(s->cc_op);
5596
        gen_op_aas();
5597
        s->cc_op = CC_OP_EFLAGS;
5598
        break;
5599
    case 0xd4: /* aam */
5600
        if (CODE64(s))
5601
            goto illegal_op;
5602
        val = ldub_code(s->pc++);
5603
        if (val == 0) {
5604
            gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5605
        } else {
5606
            gen_op_aam(val);
5607
            s->cc_op = CC_OP_LOGICB;
5608
        }
5609
        break;
5610
    case 0xd5: /* aad */
5611
        if (CODE64(s))
5612
            goto illegal_op;
5613
        val = ldub_code(s->pc++);
5614
        gen_op_aad(val);
5615
        s->cc_op = CC_OP_LOGICB;
5616
        break;
5617
        /************************/
5618
        /* misc */
5619
    case 0x90: /* nop */
5620
        /* XXX: xchg + rex handling */
5621
        /* XXX: correct lock test for all insn */
5622
        if (prefixes & PREFIX_LOCK)
5623
            goto illegal_op;
5624
        if (prefixes & PREFIX_REPZ) {
5625
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
5626
        }
5627
        break;
5628
    case 0x9b: /* fwait */
5629
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5630
            (HF_MP_MASK | HF_TS_MASK)) {
5631
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5632
        } else {
5633
            if (s->cc_op != CC_OP_DYNAMIC)
5634
                gen_op_set_cc_op(s->cc_op);
5635
            gen_jmp_im(pc_start - s->cs_base);
5636
            tcg_gen_helper_0_0(helper_fwait);
5637
        }
5638
        break;
5639
    case 0xcc: /* int3 */
5640
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5641
            break;
5642
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5643
        break;
5644
    case 0xcd: /* int N */
5645
        val = ldub_code(s->pc++);
5646
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5647
            break;
5648
        if (s->vm86 && s->iopl != 3) {
5649
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5650
        } else {
5651
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5652
        }
5653
        break;
5654
    case 0xce: /* into */
5655
        if (CODE64(s))
5656
            goto illegal_op;
5657
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5658
            break;
5659
        if (s->cc_op != CC_OP_DYNAMIC)
5660
            gen_op_set_cc_op(s->cc_op);
5661
        gen_jmp_im(pc_start - s->cs_base);
5662
        gen_op_into(s->pc - pc_start);
5663
        break;
5664
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
5665
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
5666
            break;
5667
#if 1
5668
        gen_debug(s, pc_start - s->cs_base);
5669
#else
5670
        /* start debug */
5671
        tb_flush(cpu_single_env);
5672
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5673
#endif
5674
        break;
5675
    case 0xfa: /* cli */
5676
        if (!s->vm86) {
5677
            if (s->cpl <= s->iopl) {
5678
                tcg_gen_helper_0_0(helper_cli);
5679
            } else {
5680
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5681
            }
5682
        } else {
5683
            if (s->iopl == 3) {
5684
                tcg_gen_helper_0_0(helper_cli);
5685
            } else {
5686
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5687
            }
5688
        }
5689
        break;
5690
    case 0xfb: /* sti */
5691
        if (!s->vm86) {
5692
            if (s->cpl <= s->iopl) {
5693
            gen_sti:
5694
                tcg_gen_helper_0_0(helper_sti);
5695
                /* interruptions are enabled only the first insn after sti */
5696
                /* If several instructions disable interrupts, only the
5697
                   _first_ does it */
5698
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5699
                    tcg_gen_helper_0_0(helper_set_inhibit_irq);
5700
                /* give a chance to handle pending irqs */
5701
                gen_jmp_im(s->pc - s->cs_base);
5702
                gen_eob(s);
5703
            } else {
5704
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5705
            }
5706
        } else {
5707
            if (s->iopl == 3) {
5708
                goto gen_sti;
5709
            } else {
5710
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5711
            }
5712
        }
5713
        break;
5714
    case 0x62: /* bound */
5715
        if (CODE64(s))
5716
            goto illegal_op;
5717
        ot = dflag ? OT_LONG : OT_WORD;
5718
        modrm = ldub_code(s->pc++);
5719
        reg = (modrm >> 3) & 7;
5720
        mod = (modrm >> 6) & 3;
5721
        if (mod == 3)
5722
            goto illegal_op;
5723
        gen_op_mov_TN_reg(ot, 0, reg);
5724
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5725
        gen_jmp_im(pc_start - s->cs_base);
5726
        if (ot == OT_WORD)
5727
            tcg_gen_helper_0_0(helper_boundw);
5728
        else
5729
            tcg_gen_helper_0_0(helper_boundl);
5730
        break;
5731
    case 0x1c8 ... 0x1cf: /* bswap reg */
5732
        reg = (b & 7) | REX_B(s);
5733
#ifdef TARGET_X86_64
5734
        if (dflag == 2) {
5735
            gen_op_mov_TN_reg(OT_QUAD, 0, reg);
5736
            tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
5737
            gen_op_mov_reg_T0(OT_QUAD, reg);
5738
        } else
5739
        {
5740
            TCGv tmp0;
5741
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
5742
            
5743
            tmp0 = tcg_temp_new(TCG_TYPE_I32);
5744
            tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
5745
            tcg_gen_bswap_i32(tmp0, tmp0);
5746
            tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
5747
            gen_op_mov_reg_T0(OT_LONG, reg);
5748
        }
5749
#else
5750
        {
5751
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
5752
            tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
5753
            gen_op_mov_reg_T0(OT_LONG, reg);
5754
        }
5755
#endif
5756
        break;
5757
    case 0xd6: /* salc */
5758
        if (CODE64(s))
5759
            goto illegal_op;
5760
        if (s->cc_op != CC_OP_DYNAMIC)
5761
            gen_op_set_cc_op(s->cc_op);
5762
        gen_op_salc();
5763
        break;
5764
    case 0xe0: /* loopnz */
5765
    case 0xe1: /* loopz */
5766
        if (s->cc_op != CC_OP_DYNAMIC)
5767
            gen_op_set_cc_op(s->cc_op);
5768
        /* FALL THRU */
5769
    case 0xe2: /* loop */
5770
    case 0xe3: /* jecxz */
5771
        {
5772
            int l1, l2;
5773

    
5774
            tval = (int8_t)insn_get(s, OT_BYTE);
5775
            next_eip = s->pc - s->cs_base;
5776
            tval += next_eip;
5777
            if (s->dflag == 0)
5778
                tval &= 0xffff;
5779

    
5780
            l1 = gen_new_label();
5781
            l2 = gen_new_label();
5782
            b &= 3;
5783
            if (b == 3) {
5784
                gen_op_jz_ecx[s->aflag](l1);
5785
            } else {
5786
                gen_op_dec_ECX[s->aflag]();
5787
                if (b <= 1)
5788
                    gen_op_mov_T0_cc();
5789
                gen_op_loop[s->aflag][b](l1);
5790
            }
5791

    
5792
            gen_jmp_im(next_eip);
5793
            gen_op_jmp_label(l2);
5794
            gen_set_label(l1);
5795
            gen_jmp_im(tval);
5796
            gen_set_label(l2);
5797
            gen_eob(s);
5798
        }
5799
        break;
5800
    case 0x130: /* wrmsr */
5801
    case 0x132: /* rdmsr */
5802
        if (s->cpl != 0) {
5803
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5804
        } else {
5805
            int retval = 0;
5806
            if (b & 2) {
5807
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
5808
                tcg_gen_helper_0_0(helper_rdmsr);
5809
            } else {
5810
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
5811
                tcg_gen_helper_0_0(helper_wrmsr);
5812
            }
5813
            if(retval)
5814
                gen_eob(s);
5815
        }
5816
        break;
5817
    case 0x131: /* rdtsc */
5818
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
5819
            break;
5820
        gen_jmp_im(pc_start - s->cs_base);
5821
        tcg_gen_helper_0_0(helper_rdtsc);
5822
        break;
5823
    case 0x133: /* rdpmc */
5824
        gen_jmp_im(pc_start - s->cs_base);
5825
        tcg_gen_helper_0_0(helper_rdpmc);
5826
        break;
5827
    case 0x134: /* sysenter */
5828
        if (CODE64(s))
5829
            goto illegal_op;
5830
        if (!s->pe) {
5831
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5832
        } else {
5833
            if (s->cc_op != CC_OP_DYNAMIC) {
5834
                gen_op_set_cc_op(s->cc_op);
5835
                s->cc_op = CC_OP_DYNAMIC;
5836
            }
5837
            gen_jmp_im(pc_start - s->cs_base);
5838
            tcg_gen_helper_0_0(helper_sysenter);
5839
            gen_eob(s);
5840
        }
5841
        break;
5842
    case 0x135: /* sysexit */
5843
        if (CODE64(s))
5844
            goto illegal_op;
5845
        if (!s->pe) {
5846
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5847
        } else {
5848
            if (s->cc_op != CC_OP_DYNAMIC) {
5849
                gen_op_set_cc_op(s->cc_op);
5850
                s->cc_op = CC_OP_DYNAMIC;
5851
            }
5852
            gen_jmp_im(pc_start - s->cs_base);
5853
            tcg_gen_helper_0_0(helper_sysexit);
5854
            gen_eob(s);
5855
        }
5856
        break;
5857
#ifdef TARGET_X86_64
5858
    case 0x105: /* syscall */
5859
        /* XXX: is it usable in real mode ? */
5860
        if (s->cc_op != CC_OP_DYNAMIC) {
5861
            gen_op_set_cc_op(s->cc_op);
5862
            s->cc_op = CC_OP_DYNAMIC;
5863
        }
5864
        gen_jmp_im(pc_start - s->cs_base);
5865
        tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
5866
        gen_eob(s);
5867
        break;
5868
    case 0x107: /* sysret */
5869
        if (!s->pe) {
5870
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5871
        } else {
5872
            if (s->cc_op != CC_OP_DYNAMIC) {
5873
                gen_op_set_cc_op(s->cc_op);
5874
                s->cc_op = CC_OP_DYNAMIC;
5875
            }
5876
            gen_jmp_im(pc_start - s->cs_base);
5877
            tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
5878
            /* condition codes are modified only in long mode */
5879
            if (s->lma)
5880
                s->cc_op = CC_OP_EFLAGS;
5881
            gen_eob(s);
5882
        }
5883
        break;
5884
#endif
5885
    case 0x1a2: /* cpuid */
5886
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
5887
            break;
5888
        tcg_gen_helper_0_0(helper_cpuid);
5889
        break;
5890
    case 0xf4: /* hlt */
5891
        if (s->cpl != 0) {
5892
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5893
        } else {
5894
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
5895
                break;
5896
            if (s->cc_op != CC_OP_DYNAMIC)
5897
                gen_op_set_cc_op(s->cc_op);
5898
            gen_jmp_im(s->pc - s->cs_base);
5899
            tcg_gen_helper_0_0(helper_hlt);
5900
            s->is_jmp = 3;
5901
        }
5902
        break;
5903
    case 0x100:
5904
        modrm = ldub_code(s->pc++);
5905
        mod = (modrm >> 6) & 3;
5906
        op = (modrm >> 3) & 7;
5907
        switch(op) {
5908
        case 0: /* sldt */
5909
            if (!s->pe || s->vm86)
5910
                goto illegal_op;
5911
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
5912
                break;
5913
            gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5914
            ot = OT_WORD;
5915
            if (mod == 3)
5916
                ot += s->dflag;
5917
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5918
            break;
5919
        case 2: /* lldt */
5920
            if (!s->pe || s->vm86)
5921
                goto illegal_op;
5922
            if (s->cpl != 0) {
5923
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5924
            } else {
5925
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
5926
                    break;
5927
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5928
                gen_jmp_im(pc_start - s->cs_base);
5929
                tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
5930
                tcg_gen_helper_0_1(helper_lldt, cpu_tmp2);
5931
            }
5932
            break;
5933
        case 1: /* str */
5934
            if (!s->pe || s->vm86)
5935
                goto illegal_op;
5936
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
5937
                break;
5938
            gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5939
            ot = OT_WORD;
5940
            if (mod == 3)
5941
                ot += s->dflag;
5942
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5943
            break;
5944
        case 3: /* ltr */
5945
            if (!s->pe || s->vm86)
5946
                goto illegal_op;
5947
            if (s->cpl != 0) {
5948
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5949
            } else {
5950
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
5951
                    break;
5952
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5953
                gen_jmp_im(pc_start - s->cs_base);
5954
                tcg_gen_trunc_tl_i32(cpu_tmp2, cpu_T[0]);
5955
                tcg_gen_helper_0_1(helper_ltr, cpu_tmp2);
5956
            }
5957
            break;
5958
        case 4: /* verr */
5959
        case 5: /* verw */
5960
            if (!s->pe || s->vm86)
5961
                goto illegal_op;
5962
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5963
            if (s->cc_op != CC_OP_DYNAMIC)
5964
                gen_op_set_cc_op(s->cc_op);
5965
            if (op == 4)
5966
                gen_op_verr();
5967
            else
5968
                gen_op_verw();
5969
            s->cc_op = CC_OP_EFLAGS;
5970
            break;
5971
        default:
5972
            goto illegal_op;
5973
        }
5974
        break;
5975
    case 0x101:
5976
        modrm = ldub_code(s->pc++);
5977
        mod = (modrm >> 6) & 3;
5978
        op = (modrm >> 3) & 7;
5979
        rm = modrm & 7;
5980
        switch(op) {
5981
        case 0: /* sgdt */
5982
            if (mod == 3)
5983
                goto illegal_op;
5984
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
5985
                break;
5986
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5987
            gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5988
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
5989
            gen_add_A0_im(s, 2);
5990
            gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5991
            if (!s->dflag)
5992
                gen_op_andl_T0_im(0xffffff);
5993
            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5994
            break;
5995
        case 1:
5996
            if (mod == 3) {
5997
                switch (rm) {
5998
                case 0: /* monitor */
5999
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6000
                        s->cpl != 0)
6001
                        goto illegal_op;
6002
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
6003
                        break;
6004
                    gen_jmp_im(pc_start - s->cs_base);
6005
#ifdef TARGET_X86_64
6006
                    if (s->aflag == 2) {
6007
                        gen_op_movq_A0_reg(R_EBX);
6008
                        gen_op_addq_A0_AL();
6009
                    } else
6010
#endif
6011
                    {
6012
                        gen_op_movl_A0_reg(R_EBX);
6013
                        gen_op_addl_A0_AL();
6014
                        if (s->aflag == 0)
6015
                            gen_op_andl_A0_ffff();
6016
                    }
6017
                    gen_add_A0_ds_seg(s);
6018
                    tcg_gen_helper_0_1(helper_monitor, cpu_A0);
6019
                    break;
6020
                case 1: /* mwait */
6021
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6022
                        s->cpl != 0)
6023
                        goto illegal_op;
6024
                    if (s->cc_op != CC_OP_DYNAMIC) {
6025
                        gen_op_set_cc_op(s->cc_op);
6026
                        s->cc_op = CC_OP_DYNAMIC;
6027
                    }
6028
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
6029
                        break;
6030
                    gen_jmp_im(s->pc - s->cs_base);
6031
                    tcg_gen_helper_0_0(helper_mwait);
6032
                    gen_eob(s);
6033
                    break;
6034
                default:
6035
                    goto illegal_op;
6036
                }
6037
            } else { /* sidt */
6038
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
6039
                    break;
6040
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6041
                gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
6042
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
6043
                gen_add_A0_im(s, 2);
6044
                gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
6045
                if (!s->dflag)
6046
                    gen_op_andl_T0_im(0xffffff);
6047
                gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6048
            }
6049
            break;
6050
        case 2: /* lgdt */
6051
        case 3: /* lidt */
6052
            if (mod == 3) {
6053
                switch(rm) {
6054
                case 0: /* VMRUN */
6055
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
6056
                        break;
6057
                    if (s->cc_op != CC_OP_DYNAMIC)
6058
                        gen_op_set_cc_op(s->cc_op);
6059
                    gen_jmp_im(s->pc - s->cs_base);
6060
                    tcg_gen_helper_0_0(helper_vmrun);
6061
                    s->cc_op = CC_OP_EFLAGS;
6062
                    gen_eob(s);
6063
                    break;
6064
                case 1: /* VMMCALL */
6065
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
6066
                         break;
6067
                    /* FIXME: cause #UD if hflags & SVM */
6068
                    tcg_gen_helper_0_0(helper_vmmcall);
6069
                    break;
6070
                case 2: /* VMLOAD */
6071
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
6072
                         break;
6073
                    tcg_gen_helper_0_0(helper_vmload);
6074
                    break;
6075
                case 3: /* VMSAVE */
6076
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
6077
                         break;
6078
                    tcg_gen_helper_0_0(helper_vmsave);
6079
                    break;
6080
                case 4: /* STGI */
6081
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
6082
                         break;
6083
                    tcg_gen_helper_0_0(helper_stgi);
6084
                    break;
6085
                case 5: /* CLGI */
6086
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
6087
                         break;
6088
                    tcg_gen_helper_0_0(helper_clgi);
6089
                    break;
6090
                case 6: /* SKINIT */
6091
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
6092
                         break;
6093
                    tcg_gen_helper_0_0(helper_skinit);
6094
                    break;
6095
                case 7: /* INVLPGA */
6096
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
6097
                         break;
6098
                    tcg_gen_helper_0_0(helper_invlpga);
6099
                    break;
6100
                default:
6101
                    goto illegal_op;
6102
                }
6103
            } else if (s->cpl != 0) {
6104
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6105
            } else {
6106
                if (gen_svm_check_intercept(s, pc_start,
6107
                                            op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
6108
                    break;
6109
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6110
                gen_op_ld_T1_A0(OT_WORD + s->mem_index);
6111
                gen_add_A0_im(s, 2);
6112
                gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6113
                if (!s->dflag)
6114
                    gen_op_andl_T0_im(0xffffff);
6115
                if (op == 2) {
6116
                    gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
6117
                    gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
6118
                } else {
6119
                    gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
6120
                    gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
6121
                }
6122
            }
6123
            break;
6124
        case 4: /* smsw */
6125
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
6126
                break;
6127
            gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
6128
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6129
            break;
6130
        case 6: /* lmsw */
6131
            if (s->cpl != 0) {
6132
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6133
            } else {
6134
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
6135
                    break;
6136
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6137
                gen_op_lmsw_T0();
6138
                gen_jmp_im(s->pc - s->cs_base);
6139
                gen_eob(s);
6140
            }
6141
            break;
6142
        case 7: /* invlpg */
6143
            if (s->cpl != 0) {
6144
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6145
            } else {
6146
                if (mod == 3) {
6147
#ifdef TARGET_X86_64
6148
                    if (CODE64(s) && rm == 0) {
6149
                        /* swapgs */
6150
                        gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
6151
                        gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
6152
                        gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
6153
                        gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
6154
                    } else
6155
#endif
6156
                    {
6157
                        goto illegal_op;
6158
                    }
6159
                } else {
6160
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
6161
                        break;
6162
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6163
                    tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
6164
                    gen_jmp_im(s->pc - s->cs_base);
6165
                    gen_eob(s);
6166
                }
6167
            }
6168
            break;
6169
        default:
6170
            goto illegal_op;
6171
        }
6172
        break;
6173
    case 0x108: /* invd */
6174
    case 0x109: /* wbinvd */
6175
        if (s->cpl != 0) {
6176
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6177
        } else {
6178
            if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6179
                break;
6180
            /* nothing to do */
6181
        }
6182
        break;
6183
    case 0x63: /* arpl or movslS (x86_64) */
6184
#ifdef TARGET_X86_64
6185
        if (CODE64(s)) {
6186
            int d_ot;
6187
            /* d_ot is the size of destination */
6188
            d_ot = dflag + OT_WORD;
6189

    
6190
            modrm = ldub_code(s->pc++);
6191
            reg = ((modrm >> 3) & 7) | rex_r;
6192
            mod = (modrm >> 6) & 3;
6193
            rm = (modrm & 7) | REX_B(s);
6194

    
6195
            if (mod == 3) {
6196
                gen_op_mov_TN_reg(OT_LONG, 0, rm);
6197
                /* sign extend */
6198
                if (d_ot == OT_QUAD)
6199
                    gen_op_movslq_T0_T0();
6200
                gen_op_mov_reg_T0(d_ot, reg);
6201
            } else {
6202
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6203
                if (d_ot == OT_QUAD) {
6204
                    gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6205
                } else {
6206
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6207
                }
6208
                gen_op_mov_reg_T0(d_ot, reg);
6209
            }
6210
        } else
6211
#endif
6212
        {
6213
            if (!s->pe || s->vm86)
6214
                goto illegal_op;
6215
            ot = dflag ? OT_LONG : OT_WORD;
6216
            modrm = ldub_code(s->pc++);
6217
            reg = (modrm >> 3) & 7;
6218
            mod = (modrm >> 6) & 3;
6219
            rm = modrm & 7;
6220
            if (mod != 3) {
6221
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6222
                gen_op_ld_T0_A0(ot + s->mem_index);
6223
            } else {
6224
                gen_op_mov_TN_reg(ot, 0, rm);
6225
            }
6226
            if (s->cc_op != CC_OP_DYNAMIC)
6227
                gen_op_set_cc_op(s->cc_op);
6228
            gen_op_arpl();
6229
            s->cc_op = CC_OP_EFLAGS;
6230
            if (mod != 3) {
6231
                gen_op_st_T0_A0(ot + s->mem_index);
6232
            } else {
6233
                gen_op_mov_reg_T0(ot, rm);
6234
            }
6235
            gen_op_arpl_update();
6236
        }
6237
        break;
6238
    case 0x102: /* lar */
6239
    case 0x103: /* lsl */
6240
        if (!s->pe || s->vm86)
6241
            goto illegal_op;
6242
        ot = dflag ? OT_LONG : OT_WORD;
6243
        modrm = ldub_code(s->pc++);
6244
        reg = ((modrm >> 3) & 7) | rex_r;
6245
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6246
        gen_op_mov_TN_reg(ot, 1, reg);
6247
        if (s->cc_op != CC_OP_DYNAMIC)
6248
            gen_op_set_cc_op(s->cc_op);
6249
        if (b == 0x102)
6250
            gen_op_lar();
6251
        else
6252
            gen_op_lsl();
6253
        s->cc_op = CC_OP_EFLAGS;
6254
        gen_op_mov_reg_T1(ot, reg);
6255
        break;
6256
    case 0x118:
6257
        modrm = ldub_code(s->pc++);
6258
        mod = (modrm >> 6) & 3;
6259
        op = (modrm >> 3) & 7;
6260
        switch(op) {
6261
        case 0: /* prefetchnta */
6262
        case 1: /* prefetchnt0 */
6263
        case 2: /* prefetchnt0 */
6264
        case 3: /* prefetchnt0 */
6265
            if (mod == 3)
6266
                goto illegal_op;
6267
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6268
            /* nothing more to do */
6269
            break;
6270
        default: /* nop (multi byte) */
6271
            gen_nop_modrm(s, modrm);
6272
            break;
6273
        }
6274
        break;
6275
    case 0x119 ... 0x11f: /* nop (multi byte) */
6276
        modrm = ldub_code(s->pc++);
6277
        gen_nop_modrm(s, modrm);
6278
        break;
6279
    case 0x120: /* mov reg, crN */
6280
    case 0x122: /* mov crN, reg */
6281
        if (s->cpl != 0) {
6282
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6283
        } else {
6284
            modrm = ldub_code(s->pc++);
6285
            if ((modrm & 0xc0) != 0xc0)
6286
                goto illegal_op;
6287
            rm = (modrm & 7) | REX_B(s);
6288
            reg = ((modrm >> 3) & 7) | rex_r;
6289
            if (CODE64(s))
6290
                ot = OT_QUAD;
6291
            else
6292
                ot = OT_LONG;
6293
            switch(reg) {
6294
            case 0:
6295
            case 2:
6296
            case 3:
6297
            case 4:
6298
            case 8:
6299
                if (b & 2) {
6300
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6301
                    gen_op_mov_TN_reg(ot, 0, rm);
6302
                    gen_op_movl_crN_T0(reg);
6303
                    gen_jmp_im(s->pc - s->cs_base);
6304
                    gen_eob(s);
6305
                } else {
6306
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6307
#if !defined(CONFIG_USER_ONLY)
6308
                    if (reg == 8)
6309
                        gen_op_movtl_T0_cr8();
6310
                    else
6311
#endif
6312
                        gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6313
                    gen_op_mov_reg_T0(ot, rm);
6314
                }
6315
                break;
6316
            default:
6317
                goto illegal_op;
6318
            }
6319
        }
6320
        break;
6321
    case 0x121: /* mov reg, drN */
6322
    case 0x123: /* mov drN, reg */
6323
        if (s->cpl != 0) {
6324
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6325
        } else {
6326
            modrm = ldub_code(s->pc++);
6327
            if ((modrm & 0xc0) != 0xc0)
6328
                goto illegal_op;
6329
            rm = (modrm & 7) | REX_B(s);
6330
            reg = ((modrm >> 3) & 7) | rex_r;
6331
            if (CODE64(s))
6332
                ot = OT_QUAD;
6333
            else
6334
                ot = OT_LONG;
6335
            /* XXX: do it dynamically with CR4.DE bit */
6336
            if (reg == 4 || reg == 5 || reg >= 8)
6337
                goto illegal_op;
6338
            if (b & 2) {
6339
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6340
                gen_op_mov_TN_reg(ot, 0, rm);
6341
                gen_op_movl_drN_T0(reg);
6342
                gen_jmp_im(s->pc - s->cs_base);
6343
                gen_eob(s);
6344
            } else {
6345
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6346
                gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6347
                gen_op_mov_reg_T0(ot, rm);
6348
            }
6349
        }
6350
        break;
6351
    case 0x106: /* clts */
6352
        if (s->cpl != 0) {
6353
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6354
        } else {
6355
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6356
            gen_op_clts();
6357
            /* abort block because static cpu state changed */
6358
            gen_jmp_im(s->pc - s->cs_base);
6359
            gen_eob(s);
6360
        }
6361
        break;
6362
    /* MMX/3DNow!/SSE/SSE2/SSE3 support */
6363
    case 0x1c3: /* MOVNTI reg, mem */
6364
        if (!(s->cpuid_features & CPUID_SSE2))
6365
            goto illegal_op;
6366
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6367
        modrm = ldub_code(s->pc++);
6368
        mod = (modrm >> 6) & 3;
6369
        if (mod == 3)
6370
            goto illegal_op;
6371
        reg = ((modrm >> 3) & 7) | rex_r;
6372
        /* generate a generic store */
6373
        gen_ldst_modrm(s, modrm, ot, reg, 1);
6374
        break;
6375
    case 0x1ae:
6376
        modrm = ldub_code(s->pc++);
6377
        mod = (modrm >> 6) & 3;
6378
        op = (modrm >> 3) & 7;
6379
        switch(op) {
6380
        case 0: /* fxsave */
6381
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6382
                (s->flags & HF_EM_MASK))
6383
                goto illegal_op;
6384
            if (s->flags & HF_TS_MASK) {
6385
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6386
                break;
6387
            }
6388
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6389
            if (s->cc_op != CC_OP_DYNAMIC)
6390
                gen_op_set_cc_op(s->cc_op);
6391
            gen_jmp_im(pc_start - s->cs_base);
6392
            tcg_gen_helper_0_2(helper_fxsave, 
6393
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
6394
            break;
6395
        case 1: /* fxrstor */
6396
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6397
                (s->flags & HF_EM_MASK))
6398
                goto illegal_op;
6399
            if (s->flags & HF_TS_MASK) {
6400
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6401
                break;
6402
            }
6403
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6404
            if (s->cc_op != CC_OP_DYNAMIC)
6405
                gen_op_set_cc_op(s->cc_op);
6406
            gen_jmp_im(pc_start - s->cs_base);
6407
            tcg_gen_helper_0_2(helper_fxrstor,
6408
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
6409
            break;
6410
        case 2: /* ldmxcsr */
6411
        case 3: /* stmxcsr */
6412
            if (s->flags & HF_TS_MASK) {
6413
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6414
                break;
6415
            }
6416
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6417
                mod == 3)
6418
                goto illegal_op;
6419
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6420
            if (op == 2) {
6421
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6422
                gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6423
            } else {
6424
                gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6425
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
6426
            }
6427
            break;
6428
        case 5: /* lfence */
6429
        case 6: /* mfence */
6430
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6431
                goto illegal_op;
6432
            break;
6433
        case 7: /* sfence / clflush */
6434
            if ((modrm & 0xc7) == 0xc0) {
6435
                /* sfence */
6436
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
6437
                if (!(s->cpuid_features & CPUID_SSE))
6438
                    goto illegal_op;
6439
            } else {
6440
                /* clflush */
6441
                if (!(s->cpuid_features & CPUID_CLFLUSH))
6442
                    goto illegal_op;
6443
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6444
            }
6445
            break;
6446
        default:
6447
            goto illegal_op;
6448
        }
6449
        break;
6450
    case 0x10d: /* 3DNow! prefetch(w) */
6451
        modrm = ldub_code(s->pc++);
6452
        mod = (modrm >> 6) & 3;
6453
        if (mod == 3)
6454
            goto illegal_op;
6455
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6456
        /* ignore for now */
6457
        break;
6458
    case 0x1aa: /* rsm */
6459
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
6460
            break;
6461
        if (!(s->flags & HF_SMM_MASK))
6462
            goto illegal_op;
6463
        if (s->cc_op != CC_OP_DYNAMIC) {
6464
            gen_op_set_cc_op(s->cc_op);
6465
            s->cc_op = CC_OP_DYNAMIC;
6466
        }
6467
        gen_jmp_im(s->pc - s->cs_base);
6468
        tcg_gen_helper_0_0(helper_rsm);
6469
        gen_eob(s);
6470
        break;
6471
    case 0x10e ... 0x10f:
6472
        /* 3DNow! instructions, ignore prefixes */
6473
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
6474
    case 0x110 ... 0x117:
6475
    case 0x128 ... 0x12f:
6476
    case 0x150 ... 0x177:
6477
    case 0x17c ... 0x17f:
6478
    case 0x1c2:
6479
    case 0x1c4 ... 0x1c6:
6480
    case 0x1d0 ... 0x1fe:
6481
        gen_sse(s, b, pc_start, rex_r);
6482
        break;
6483
    default:
6484
        goto illegal_op;
6485
    }
6486
    /* lock generation */
6487
    if (s->prefix & PREFIX_LOCK)
6488
        gen_op_unlock();
6489
    return s->pc;
6490
 illegal_op:
6491
    if (s->prefix & PREFIX_LOCK)
6492
        gen_op_unlock();
6493
    /* XXX: ensure that no lock was generated */
6494
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6495
    return s->pc;
6496
}
6497

    
6498
/* Convenience masks grouping the x86 condition-code flag bits.
   OSZAPC = all six arithmetic flags; OSZAP = all but carry. */
#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)

/* flags read by an operation */
static uint16_t opc_read_flags[NB_OPS] = {
    /* BCD adjust ops consume the auxiliary (and sometimes carry) flag */
    [INDEX_op_aas] = CC_A,
    [INDEX_op_aaa] = CC_A,
    [INDEX_op_das] = CC_A | CC_C,
    [INDEX_op_daa] = CC_A | CC_C,

    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_C,

    [INDEX_op_into] = CC_O,

    /* conditional jumps specialised on a subtract result (b/w/l widths) */
    [INDEX_op_jb_subb] = CC_C,
    [INDEX_op_jb_subw] = CC_C,
    [INDEX_op_jb_subl] = CC_C,

    [INDEX_op_jz_subb] = CC_Z,
    [INDEX_op_jz_subw] = CC_Z,
    [INDEX_op_jz_subl] = CC_Z,

    [INDEX_op_jbe_subb] = CC_Z | CC_C,
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
    [INDEX_op_jbe_subl] = CC_Z | CC_C,

    [INDEX_op_js_subb] = CC_S,
    [INDEX_op_js_subw] = CC_S,
    [INDEX_op_js_subl] = CC_S,

    [INDEX_op_jl_subb] = CC_O | CC_S,
    [INDEX_op_jl_subw] = CC_O | CC_S,
    [INDEX_op_jl_subl] = CC_O | CC_S,

    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

    /* loopz/loopnz test ZF in addition to the count register */
    [INDEX_op_loopnzw] = CC_Z,
    [INDEX_op_loopnzl] = CC_Z,
    [INDEX_op_loopzw] = CC_Z,
    [INDEX_op_loopzl] = CC_Z,

    /* setcc reading the dynamically computed flags */
    [INDEX_op_seto_T0_cc] = CC_O,
    [INDEX_op_setb_T0_cc] = CC_C,
    [INDEX_op_setz_T0_cc] = CC_Z,
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
    [INDEX_op_sets_T0_cc] = CC_S,
    [INDEX_op_setp_T0_cc] = CC_P,
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

    /* setcc specialised on a subtract result */
    [INDEX_op_setb_T0_subb] = CC_C,
    [INDEX_op_setb_T0_subw] = CC_C,
    [INDEX_op_setb_T0_subl] = CC_C,

    [INDEX_op_setz_T0_subb] = CC_Z,
    [INDEX_op_setz_T0_subw] = CC_Z,
    [INDEX_op_setz_T0_subl] = CC_Z,

    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

    [INDEX_op_sets_T0_subb] = CC_S,
    [INDEX_op_sets_T0_subw] = CC_S,
    [INDEX_op_sets_T0_subl] = CC_S,

    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,

    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
    [INDEX_op_cmc] = CC_C,
    [INDEX_op_salc] = CC_C,

    /* needed for correct flag optimisation before string ops */
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
    [INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
    /* quad-word variants of the entries above */
    [INDEX_op_jb_subq] = CC_C,
    [INDEX_op_jz_subq] = CC_Z,
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
    [INDEX_op_js_subq] = CC_S,
    [INDEX_op_jl_subq] = CC_O | CC_S,
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzq] = CC_Z,
    [INDEX_op_loopzq] = CC_Z,

    [INDEX_op_setb_T0_subq] = CC_C,
    [INDEX_op_setz_T0_subq] = CC_Z,
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
    [INDEX_op_sets_T0_subq] = CC_S,
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
#endif

/* adc/sbb and rcl/rcr consume the incoming carry; this macro is
   instantiated once per memory-access suffix variant below. */
#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)

    DEF_READF( )
    DEF_READF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_READF(_kernel)
    DEF_READF(_user)
#endif
};
/* flags written by an operation */
6635
static uint16_t opc_write_flags[NB_OPS] = {
6636
    [INDEX_op_update2_cc] = CC_OSZAPC,
6637
    [INDEX_op_update1_cc] = CC_OSZAPC,
6638
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
6639
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
6640
    /* subtle: due to the incl/decl implementation, C is used */
6641
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
6642
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,
6643

    
6644
    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
6645
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
6646
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
6647
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
6648
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
6649
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
6650
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
6651
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
6652
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
6653
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
6654
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)
6655

    
6656
    /* sse */
6657
    [INDEX_op_com_dummy] = CC_OSZAPC,
6658
    [INDEX_op_com_dummy] = CC_OSZAPC,
6659
    [INDEX_op_com_dummy] = CC_OSZAPC,
6660
    [INDEX_op_com_dummy] = CC_OSZAPC,
6661

    
6662
    /* bcd */
6663
    [INDEX_op_aam] = CC_OSZAPC,
6664
    [INDEX_op_aad] = CC_OSZAPC,
6665
    [INDEX_op_aas] = CC_OSZAPC,
6666
    [INDEX_op_aaa] = CC_OSZAPC,
6667
    [INDEX_op_das] = CC_OSZAPC,
6668
    [INDEX_op_daa] = CC_OSZAPC,
6669

    
6670
    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
6671
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
6672
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
6673
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
6674
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
6675
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
6676
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
6677
    [INDEX_op_clc] = CC_C,
6678
    [INDEX_op_stc] = CC_C,
6679
    [INDEX_op_cmc] = CC_C,
6680

    
6681
    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
6682
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
6683
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
6684
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
6685
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
6686
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
6687
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
6688
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
6689
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
6690
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
6691
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
6692
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)
6693

    
6694
    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
6695
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
6696
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
6697
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
6698
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
6699
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)
6700

    
6701
    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
6702
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
6703
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
6704
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)
6705

    
6706
    [INDEX_op_cmpxchg8b] = CC_Z,
6707
    [INDEX_op_lar] = CC_Z,
6708
    [INDEX_op_lsl] = CC_Z,
6709
    [INDEX_op_verr] = CC_Z,
6710
    [INDEX_op_verw] = CC_Z,
6711
    [INDEX_op_fcomi_dummy] = CC_Z | CC_P | CC_C,
6712
    [INDEX_op_fcomi_dummy] = CC_Z | CC_P | CC_C,
6713

    
6714
#define DEF_WRITEF(SUFFIX)\
6715
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6716
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6717
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6718
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6719
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6720
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6721
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6722
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6723
\
6724
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6725
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6726
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6727
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6728
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6729
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6730
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6731
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6732
\
6733
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6734
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6735
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6736
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6737
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6738
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6739
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6740
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6741
\
6742
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6743
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6744
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6745
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6746
\
6747
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6748
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6749
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6750
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6751
\
6752
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6753
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6754
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6755
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6756
\
6757
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6758
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6759
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6760
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6761
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6762
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6763
\
6764
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6765
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6766
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6767
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6768
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6769
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6770
\
6771
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6772
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6773
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6774
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)
6775

    
6776

    
6777
    DEF_WRITEF( )
6778
    DEF_WRITEF(_raw)
6779
#ifndef CONFIG_USER_ONLY
6780
    DEF_WRITEF(_kernel)
6781
    DEF_WRITEF(_user)
6782
#endif
6783
};
/* simpler form of an operation if no flags need to be generated */
/* Maps a flag-computing opcode to an equivalent opcode that skips the
   flag computation; entries left at 0 are made identity mappings by
   optimize_flags_init(), and optimize_flags() applies the table when
   the written flags are provably dead. */
static uint16_t opc_simpler[NB_OPS] = {
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
#if 0
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
#endif

    /* shifts: drop the _cc suffix variant when flags are unused */
    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)

/* rotate simplifications, instantiated per memory-access suffix */
#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};
/* Expansion callback for TCG macro opcodes, registered via
   tcg_set_macro_func().  Only the MACRO_TEST id is handled, and only
   when MACRO_TEST is compiled in; otherwise this is a no-op. */
static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
{
#ifdef MACRO_TEST
    if (macro_id == MACRO_TEST) {
        tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
    }
#endif
}
void optimize_flags_init(void)
6841
{
6842
    int i;
6843
    /* put default values in arrays */
6844
    for(i = 0; i < NB_OPS; i++) {
6845
        if (opc_simpler[i] == 0)
6846
            opc_simpler[i] = i;
6847
    }
6848

    
6849
    tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
6850

    
6851
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
6852
#if TARGET_LONG_BITS > HOST_LONG_BITS
6853
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL, 
6854
                                  TCG_AREG0, offsetof(CPUState, t0), "T0");
6855
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
6856
                                  TCG_AREG0, offsetof(CPUState, t1), "T1");
6857
    cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
6858
                                TCG_AREG0, offsetof(CPUState, t2), "A0");
6859
#else
6860
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
6861
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
6862
    cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
6863
    cpu_tmp1 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");
6864
#endif
6865
}
/* CPU flags computation optimization: we move backward thru the
6868
   generated code to see which flags are needed. The operation is
6869
   modified if suitable */
6870
static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6871
{
6872
    uint16_t *opc_ptr;
6873
    int live_flags, write_flags, op;
6874

    
6875
    opc_ptr = opc_buf + opc_buf_len;
6876
    /* live_flags contains the flags needed by the next instructions
6877
       in the code. At the end of the block, we consider that all the
6878
       flags are live. */
6879
    live_flags = CC_OSZAPC;
6880
    while (opc_ptr > opc_buf) {
6881
        op = *--opc_ptr;
6882
        /* if none of the flags written by the instruction is used,
6883
           then we can try to find a simpler instruction */
6884
        write_flags = opc_write_flags[op];
6885
        if ((live_flags & write_flags) == 0) {
6886
            *opc_ptr = opc_simpler[op];
6887
        }
6888
        /* compute the live flags before the instruction */
6889
        live_flags &= ~write_flags;
6890
        live_flags |= opc_read_flags[op];
6891
    }
6892
}
6893

    
6894
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction (used later by
   gen_pc_load() to recover eip/cc_op from a host PC). */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int j, lj, cflags;
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* Unpack the translation-relevant CPU state bits from tb->flags
       into the disassembly context. */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    /* condition codes start unknown; disas_insn() refines cc_op */
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        /* NOTE(review): mem_index appears to encode the MMU index
           scaled by 4 (user vs kernel access functions) — confirm
           against the softmmu access helpers. */
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    /* direct block chaining is only possible when no per-insn exit
       (trap flag, single-step, inhibited irqs) can be required */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    /* allocate the TCG temporaries shared by the per-insn generators */
    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
#if TARGET_LONG_BITS > HOST_LONG_BITS
    cpu_tmp1 = tcg_temp_new(TCG_TYPE_I64);
#endif
    cpu_tmp2 = tcg_temp_new(TCG_TYPE_I32);
    cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
    cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;          /* index of the last recorded op (search_pc mode) */

    /* main translation loop: one guest instruction per iteration */
    for(;;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* record pc/cc_op for every generated micro-op so the
               host PC can be mapped back to a guest eip */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           change to be happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP_OPT) {
            fprintf(logfile, "OP before opt:\n");
            tcg_dump_ops(&tcg_ctx, logfile);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

    /* tb->size is only meaningful for a real translation, not for a
       PC-search pass over an existing block */
    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
/* Translate basic block 'tb' into micro-ops, without recording
   per-op guest PC information (the common fast path). */
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    const int search_pc = 0;

    return gen_intermediate_code_internal(env, tb, search_pc);
}
/* Re-translate basic block 'tb', additionally recording per-op guest
   PC information so a host fault PC can be mapped back to eip. */
int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    const int search_pc = 1;

    return gen_intermediate_code_internal(env, tb, search_pc);
}
/* Restore the guest CPU state (eip and, when statically known, cc_op)
   for the micro-op at index 'pc_pos' of the current translation of
   'tb'.  Relies on the gen_opc_* side tables filled in by a
   search_pc translation pass. */
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    int cc_op;

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP) {
        int idx;

        fprintf(logfile, "RESTORE:\n");
        /* dump every recorded instruction start up to pc_pos */
        for (idx = 0; idx <= pc_pos; idx++) {
            if (!gen_opc_instr_start[idx])
                continue;
            fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", idx, gen_opc_pc[idx]);
        }
        fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                (uint32_t)tb->cs_base);
    }
#endif
    /* eip is the code-segment-relative offset of the faulting insn */
    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
    cc_op = gen_opc_cc_op[pc_pos];
    /* only overwrite cc_op when the translator knew it statically */
    if (cc_op != CC_OP_DYNAMIC)
        env->cc_op = cc_op;
}