Statistics
| Branch: | Revision:

root / target-i386 / translate.c @ d2856f1a

History | View | Annotate | Download (207.9 kB)

1
/*
2
 *  i386 translation
3
 *
4
 *  Copyright (c) 2003 Fabrice Bellard
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, write to the Free Software
18
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
 */
20
#include <stdarg.h>
21
#include <stdlib.h>
22
#include <stdio.h>
23
#include <string.h>
24
#include <inttypes.h>
25
#include <signal.h>
26
#include <assert.h>
27

    
28
#include "cpu.h"
29
#include "exec-all.h"
30
#include "disas.h"
31
#include "helper.h"
32
#include "tcg-op.h"
33

    
34
#define PREFIX_REPZ   0x01
35
#define PREFIX_REPNZ  0x02
36
#define PREFIX_LOCK   0x04
37
#define PREFIX_DATA   0x08
38
#define PREFIX_ADR    0x10
39

    
40
#ifdef TARGET_X86_64
41
#define X86_64_ONLY(x) x
42
#define X86_64_DEF(x...) x
43
#define CODE64(s) ((s)->code64)
44
#define REX_X(s) ((s)->rex_x)
45
#define REX_B(s) ((s)->rex_b)
46
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
47
#if 1
48
#define BUGGY_64(x) NULL
49
#endif
50
#else
51
#define X86_64_ONLY(x) NULL
52
#define X86_64_DEF(x...)
53
#define CODE64(s) 0
54
#define REX_X(s) 0
55
#define REX_B(s) 0
56
#endif
57

    
58
//#define MACRO_TEST   1
59

    
60
/* global register indexes */
61
static TCGv cpu_env, cpu_T[2], cpu_A0;
62
/* local register indexes (only used inside old micro ops) */
63
static TCGv cpu_tmp0;
64

    
65
#ifdef TARGET_X86_64
66
static int x86_64_hregs;
67
#endif
68

    
69
/* Per-translation-block disassembly state.  Filled in from the CPU
   state/flags when a block starts, then updated as each insn is
   decoded.  One instance lives for the duration of one translated
   block. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* segment override register index, -1 if no override */
    int prefix;   /* PREFIX_* bitmask of prefixes seen on current insn */
    int aflag, dflag; /* address/operand size of current insn */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* REX prefix index/base extension bits */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;    /* current privilege level */
    int iopl;   /* I/O privilege level from EFLAGS */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb; /* block being translated */
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;      /* CPUID feature bits (EDX) */
    int cpuid_ext_features;  /* CPUID extended feature bits (ECX) */
    int cpuid_ext2_features; /* CPUID 0x80000001 EDX feature bits */
} DisasContext;
105

    
106
static void gen_eob(DisasContext *s);
107
static void gen_jmp(DisasContext *s, target_ulong eip);
108
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
109

    
110
/* i386 arith/logic operations */
111
/* i386 arith/logic operations.  Order matches the 3-bit /r opcode
   extension encoding, and indexes tables such as cc_op_arithb. */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};
121

    
122
/* i386 shift ops */
123
/* i386 shift ops.  Order matches the 3-bit /r shift-group encoding;
   slot 6 is the undocumented SHL alias. */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7, /* explicit value; same as the natural next slot */
};
133

    
134
/* operand size */
135
/* operand size selector (used as 'ot' throughout the translator) */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};
141

    
142
/* operand register indexes */
enum {
    /* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
157

    
158
/* Load the constant 0 into the T0 temporary. */
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}
162

    
163
/* Load a signed 32-bit immediate (sign-extended to target_ulong) into T0. */
static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}
167

    
168
/* Load an unsigned 32-bit immediate into T0. */
static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}
172

    
173
/* Load a signed 32-bit immediate (sign-extended to target_ulong) into T1. */
static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
177

    
178
/* Load an unsigned 32-bit immediate into T1. */
static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
182

    
183
/* Load a 32-bit immediate into the A0 address temporary. */
static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
187

    
188
#ifdef TARGET_X86_64
189
/* Load a 64-bit immediate into A0 (x86-64 only: target_ulong is 64-bit). */
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
193
#endif
194

    
195
/* Load a full target_ulong immediate into T0. */
static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}
199

    
200
/* Load a full target_ulong immediate into T1. */
static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
204

    
205
/* Truncate T0 to its low 16 bits. */
static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}
209

    
210
/* AND T0 with an immediate mask. */
static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}
214

    
215
/* Copy T1 into T0. */
static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}
219

    
220
/* Truncate A0 to its low 16 bits (16-bit addressing). */
static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
224

    
225
#ifdef TARGET_X86_64
226

    
227
#define NB_OP_SIZES 4
228

    
229
#define DEF_REGS(prefix, suffix) \
230
  prefix ## EAX ## suffix,\
231
  prefix ## ECX ## suffix,\
232
  prefix ## EDX ## suffix,\
233
  prefix ## EBX ## suffix,\
234
  prefix ## ESP ## suffix,\
235
  prefix ## EBP ## suffix,\
236
  prefix ## ESI ## suffix,\
237
  prefix ## EDI ## suffix,\
238
  prefix ## R8 ## suffix,\
239
  prefix ## R9 ## suffix,\
240
  prefix ## R10 ## suffix,\
241
  prefix ## R11 ## suffix,\
242
  prefix ## R12 ## suffix,\
243
  prefix ## R13 ## suffix,\
244
  prefix ## R14 ## suffix,\
245
  prefix ## R15 ## suffix,
246

    
247
#else /* !TARGET_X86_64 */
248

    
249
#define NB_OP_SIZES 3
250

    
251
#define DEF_REGS(prefix, suffix) \
252
  prefix ## EAX ## suffix,\
253
  prefix ## ECX ## suffix,\
254
  prefix ## EDX ## suffix,\
255
  prefix ## EBX ## suffix,\
256
  prefix ## ESP ## suffix,\
257
  prefix ## EBP ## suffix,\
258
  prefix ## ESI ## suffix,\
259
  prefix ## EDI ## suffix,
260

    
261
#endif /* !TARGET_X86_64 */
262

    
263
#if defined(WORDS_BIGENDIAN)
264
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
265
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
266
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
267
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
268
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
269
#else
270
#define REG_B_OFFSET 0
271
#define REG_H_OFFSET 1
272
#define REG_W_OFFSET 0
273
#define REG_L_OFFSET 0
274
#define REG_LH_OFFSET 4
275
#endif
276

    
277
/* Store temporary cpu_T[t_index] into guest register 'reg' with
   operand size 'ot'.  For OT_BYTE, regs 4-7 without REX address the
   high byte (AH..BH) of regs 0-3; with a REX prefix (x86_64_hregs) or
   reg >= 8 they address the low byte of the named register.  For
   OT_LONG on x86-64 the upper 32 bits are cleared, matching hardware
   zero-extension of 32-bit writes. */
static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            /* low byte of the register */
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            /* high byte (AH/CH/DH/BH) of regs 0-3 */
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
309

    
310
/* Store T0 into guest register 'reg' with operand size 'ot'. */
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 0, reg);
}
314

    
315
/* Store T1 into guest register 'reg' with operand size 'ot'. */
static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 1, reg);
}
319

    
320
/* Store the A0 address temporary into guest register 'reg'.
   'size' is an address-size code (0=16-bit, 1=32-bit, 2=64-bit),
   NOT an OT_* value.  32-bit stores on x86-64 clear the upper half. */
static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
345

    
346
/* Load guest register 'reg' into temporary cpu_T[t_index].  Only the
   OT_BYTE high-byte case (AH..BH, i.e. regs 4-7 without REX) needs a
   special 8-bit load; every other size loads the full target_ulong
   register word (callers use only the low 'ot' bits). */
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            /* high byte (AH/CH/DH/BH) of regs 0-3 */
            tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}
362

    
363
/* Load the low 32 bits of guest register 'reg' (zero-extended) into A0. */
static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}
367

    
368
/* Add an immediate to A0 with 32-bit wrap-around.  The explicit mask
   is only needed on x86-64 builds where target_ulong is 64 bits; on
   32-bit targets the addition wraps naturally. */
static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
375

    
376
#ifdef TARGET_X86_64
377
/* Add an immediate to A0 with full 64-bit arithmetic (x86-64 only). */
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
381
#endif
382
    
383
/* Add an immediate to A0, using 64-bit arithmetic in 64-bit code
   segments and 32-bit wrap-around otherwise. */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
392

    
393
/* T0 += T1. */
static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}
397

    
398
/* Store T0 into env->eip (jump to the address held in T0). */
static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}
402

    
403
/* Add an immediate to SP, storing only the low 16 bits back
   (16-bit stack segment). */
static inline void gen_op_addw_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]) + REG_W_OFFSET);
}
409

    
410
/* Add an immediate to ESP with 32-bit wrap-around and store the full
   register back (the mask is only needed when target_ulong is 64-bit). */
static inline void gen_op_addl_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}
419

    
420
#ifdef TARGET_X86_64
421
/* Add an immediate to RSP with full 64-bit arithmetic (x86-64 only). */
static inline void gen_op_addq_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}
427
#endif
428

    
429
/* Store a CC_OP_* value into env->cc_op (records which operation the
   lazy condition-code state refers to). */
static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_tl(cpu_tmp0, val);
    tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, cc_op));
}
434

    
435
/* A0 += regs[reg] << shift, then wrap to 32 bits (SIB-style scaled
   index addition; the mask is only needed on 64-bit builds). */
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0) 
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
445

    
446
/* Load the low 32 bits of segment 'reg' base address into A0. */
static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}
450

    
451
/* A0 += segment 'reg' base, wrapped to 32 bits (mask only needed on
   64-bit builds). */
static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
459

    
460
#ifdef TARGET_X86_64
461
/* Load the full 64-bit segment base of 'reg' into A0 (x86-64 only). */
static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}
465

    
466
/* A0 += segment 'reg' base, full 64-bit arithmetic (x86-64 only). */
static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
471

    
472
/* Load the full 64-bit guest register 'reg' into A0 (x86-64 only). */
static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}
476

    
477
/* A0 += regs[reg] << shift, full 64-bit arithmetic (x86-64 only;
   SIB-style scaled index addition). */
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0) 
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
484
#endif
485

    
486
/* CMOV op dispatch table, indexed by [operand size - OT_WORD][register].
   Row 0 = 16-bit, row 1 = 32-bit, row 2 = 64-bit (x86-64 only; CMOV
   has no byte form, hence NB_OP_SIZES - 1 rows). */
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_cmovw_, _T1_T0)
    },
    [1] = {
        DEF_REGS(gen_op_cmovl_, _T1_T0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_cmovq_, _T1_T0)
    },
#endif
};
499

    
500
#define DEF_ARITHC(SUFFIX)\
501
    {\
502
        gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
503
        gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
504
    },\
505
    {\
506
        gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
507
        gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
508
    },\
509
    {\
510
        gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
511
        gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
512
    },\
513
    {\
514
        X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
515
        X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
516
    },
517

    
518
/* Register-form ADC/SBB dispatch, indexed by [operand size][0=adc,1=sbb]. */
static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
    DEF_ARITHC( )
};
521

    
522
/* Memory-form ADC/SBB dispatch: 3 MMU access variants (raw/kernel/user)
   x 4 operand sizes, each with [0=adc,1=sbb]. */
static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
    DEF_ARITHC(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_ARITHC(_kernel)
    DEF_ARITHC(_user)
#endif
};
529

    
530
/* Byte-size CC_OP for each arith/logic op, indexed by the OP_* enum:
   add/adc -> ADDB, sub/sbb/cmp -> SUBB, or/and/xor -> LOGICB.
   Larger sizes are derived by adding the size to the CC_OP value. */
static const int cc_op_arithb[8] = {
    CC_OP_ADDB,
    CC_OP_LOGICB,
    CC_OP_ADDB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
};
540

    
541
#define DEF_CMPXCHG(SUFFIX)\
542
    gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
543
    gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
544
    gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
545
    X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
546

    
547
/* Register-form CMPXCHG dispatch, indexed by operand size. */
static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
    DEF_CMPXCHG( )
};
550

    
551
/* Memory-form CMPXCHG dispatch: 3 MMU access variants x 4 operand sizes. */
static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
    DEF_CMPXCHG(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_CMPXCHG(_kernel)
    DEF_CMPXCHG(_user)
#endif
};
558

    
559
#define DEF_SHIFT(SUFFIX)\
560
    {\
561
        gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
562
        gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
563
        gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
564
        gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
565
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
566
        gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
567
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
568
        gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
569
    },\
570
    {\
571
        gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
572
        gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
573
        gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
574
        gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
575
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
576
        gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
577
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
578
        gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
579
    },\
580
    {\
581
        gen_op_roll ## SUFFIX ## _T0_T1_cc,\
582
        gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
583
        gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
584
        gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
585
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
586
        gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
587
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
588
        gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
589
    },\
590
    {\
591
        X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
592
        X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
593
        X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
594
        X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
595
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
596
        X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
597
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
598
        X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
599
    },
600

    
601
/* Register-form shift/rotate dispatch, indexed by [operand size][OP_* shift op]. */
static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
    DEF_SHIFT( )
};
604

    
605
/* Memory-form shift/rotate dispatch: 3 MMU access variants x 4 sizes,
   each row indexed by the OP_* shift op. */
static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
    DEF_SHIFT(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFT(_kernel)
    DEF_SHIFT(_user)
#endif
};
612

    
613
#define DEF_SHIFTD(SUFFIX, op)\
614
    {\
615
        NULL,\
616
        NULL,\
617
    },\
618
    {\
619
        gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
620
        gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
621
     },\
622
    {\
623
        gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
624
        gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
625
    },\
626
    {\
627
X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
628
           gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
629
    },
630

    
631
/* SHLD/SHRD with immediate count, indexed by [size][0=shld,1=shrd]
   (no byte form: row 0 is NULL). */
static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
    DEF_SHIFTD(, im)
};
634

    
635
/* SHLD/SHRD with CL count, indexed by [size][0=shld,1=shrd]. */
static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
    DEF_SHIFTD(, ECX)
};
638

    
639
/* Memory-form SHLD/SHRD with immediate count: 3 MMU variants x 4 sizes. */
static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, im)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, im)
    DEF_SHIFTD(_user, im)
#endif
};
646

    
647
/* Memory-form SHLD/SHRD with CL count: 3 MMU variants x 4 sizes. */
static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, ECX)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, ECX)
    DEF_SHIFTD(_user, ECX)
#endif
};
654

    
655
/* Bit-test dispatch, indexed by [size - OT_WORD][0=bt,1=bts,2=btr,3=btc]
   (no byte form: row 0 = 16-bit, 1 = 32-bit, 2 = 64-bit). */
static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
    [0] = {
        gen_op_btw_T0_T1_cc,
        gen_op_btsw_T0_T1_cc,
        gen_op_btrw_T0_T1_cc,
        gen_op_btcw_T0_T1_cc,
    },
    [1] = {
        gen_op_btl_T0_T1_cc,
        gen_op_btsl_T0_T1_cc,
        gen_op_btrl_T0_T1_cc,
        gen_op_btcl_T0_T1_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_btq_T0_T1_cc,
        gen_op_btsq_T0_T1_cc,
        gen_op_btrq_T0_T1_cc,
        gen_op_btcq_T0_T1_cc,
    },
#endif
};
677

    
678
/* Adjust A0 by the bit offset in T1 for memory bit-test ops,
   indexed by size - OT_WORD. */
static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
    gen_op_add_bitw_A0_T1,
    gen_op_add_bitl_A0_T1,
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
};
683

    
684
/* BSF/BSR dispatch, indexed by [size - OT_WORD][0=bsf,1=bsr]. */
static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
    [0] = {
        gen_op_bsfw_T0_cc,
        gen_op_bsrw_T0_cc,
    },
    [1] = {
        gen_op_bsfl_T0_cc,
        gen_op_bsrl_T0_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_bsfq_T0_cc,
        gen_op_bsrq_T0_cc,
    },
#endif
};
700

    
701
static inline void gen_op_lds_T0_A0(int idx)
702
{
703
    int mem_index = (idx >> 2) - 1;
704
    switch(idx & 3) {
705
    case 0:
706
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
707
        break;
708
    case 1:
709
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
710
        break;
711
    default:
712
    case 2:
713
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
714
        break;
715
    }
716
}
717

    
718
/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
719
/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
/* Zero-extending load from the address in A0 into T0.  (idx & 3)
   selects the width (0=8, 1=16, 2=32, 3/other=64 bits); the upper
   bits of idx carry the MMU memory index. */
static inline void gen_op_ld_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
738

    
739
/* Explicitly-unsigned alias of gen_op_ld_T0_A0 (which already
   zero-extends). */
static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_T0_A0(idx);
}
743

    
744
/* Zero-extending load from the address in A0 into T1; same idx
   encoding as gen_op_ld_T0_A0. */
static inline void gen_op_ld_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}
763

    
764
/* Store T0 to the address in A0; same idx encoding as gen_op_ld_T0_A0. */
static inline void gen_op_st_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
783

    
784
static inline void gen_op_st_T1_A0(int idx)
785
{
786
    int mem_index = (idx >> 2) - 1;
787
    switch(idx & 3) {
788
    case 0:
789
        tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
790
        break;
791
    case 1:
792
        tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
793
        break;
794
    case 2:
795
        tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
796
        break;
797
    default:
798
    case 3:
799
        tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
800
        break;
801
    }
802
}
803

    
804
/* Store an immediate program counter value into env->eip. */
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
809

    
810
/* Compute the source address of a string instruction into A0:
   (E/R)SI plus the applicable segment base.  The segment can be
   overridden by a prefix; it defaults to DS.  Segment bases are
   skipped in flat 32-bit mode (addseg == 0) but always applied for
   16-bit addressing. */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64 bit address: segment base only when explicitly overridden */
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 bit address, segment base always added */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}
844

    
845
/* Compute the destination address of a string instruction into A0:
   (E/R)DI plus the ES base.  The ES segment cannot be overridden. */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        /* 16-bit addressing: mask EDI and always add the ES base */
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
865

    
866
/* Load into T0 the per-iteration increment for string ops
   (+/- element size depending on the direction flag), by size. */
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
    gen_op_movl_T0_Dshiftb,
    gen_op_movl_T0_Dshiftw,
    gen_op_movl_T0_Dshiftl,
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
};
872

    
873
/* Branch to a label if (E/R)CX != 0, indexed by address size (aflag). */
static GenOpFunc1 *gen_op_jnz_ecx[3] = {
    gen_op_jnz_ecxw,
    gen_op_jnz_ecxl,
    X86_64_ONLY(gen_op_jnz_ecxq),
};
878

    
879
/* Branch to a label if (E/R)CX == 0, indexed by address size (aflag). */
static GenOpFunc1 *gen_op_jz_ecx[3] = {
    gen_op_jz_ecxw,
    gen_op_jz_ecxl,
    X86_64_ONLY(gen_op_jz_ecxq),
};
884

    
885
/* Decrement (E/R)CX, indexed by address size (aflag). */
static GenOpFunc *gen_op_dec_ECX[3] = {
    gen_op_decw_ECX,
    gen_op_decl_ECX,
    X86_64_ONLY(gen_op_decq_ECX),
};
890

    
891
/* REPZ/REPNZ termination tests for SCAS/CMPS, indexed by
   [0=jnz (repz), 1=jz (repnz)][operand size]. */
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};
905

    
906
/* I/O port read, port in DX, result in T0; indexed by operand size. */
static GenOpFunc *gen_op_in_DX_T0[3] = {
    gen_op_inb_DX_T0,
    gen_op_inw_DX_T0,
    gen_op_inl_DX_T0,
};
911

    
912
/* I/O port write of T0, port in DX; indexed by operand size. */
static GenOpFunc *gen_op_out_DX_T0[3] = {
    gen_op_outb_DX_T0,
    gen_op_outw_DX_T0,
    gen_op_outl_DX_T0,
};
917

    
918
/* I/O port read, port in T0, result in T1; indexed by operand size. */
static GenOpFunc *gen_op_in[3] = {
    gen_op_inb_T0_T1,
    gen_op_inw_T0_T1,
    gen_op_inl_T0_T1,
};
923

    
924
/* I/O port write of T1, port in T0; indexed by operand size. */
static GenOpFunc *gen_op_out[3] = {
    gen_op_outb_T0_T1,
    gen_op_outw_T0_T1,
    gen_op_outl_T0_T1,
};
929

    
930
/* I/O permission check for port number in T0; indexed by operand size. */
static GenOpFunc *gen_check_io_T0[3] = {
    gen_op_check_iob_T0,
    gen_op_check_iow_T0,
    gen_op_check_iol_T0,
};
935

    
936
/* I/O permission check for port number in DX; indexed by operand size. */
static GenOpFunc *gen_check_io_DX[3] = {
    gen_op_check_iob_DX,
    gen_op_check_iow_DX,
    gen_op_check_iol_DX,
};
941

    
942
/* Emit an I/O permission-bitmap check before an IN/OUT/INS/OUTS insn.
   Only needed in protected mode when CPL > IOPL, or in vm86 mode.
   cc_op and EIP are synced first because the check may raise a fault.
   'use_dx' selects whether the port number is in DX or in T0. */
static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
{
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        if (use_dx)
            gen_check_io_DX[ot]();
        else
            gen_check_io_T0[ot]();
    }
}
954

    
955
/* Generate one MOVS iteration: load from [seg:SI], store to [ES:DI],
   then advance SI and DI by the direction-flag increment (Dshift),
   using the arithmetic width selected by s->aflag. */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
976

    
977
/* Flush the statically-known cc_op into env->cc_op and mark the
   translator's view as dynamic; no-op if it is already dynamic. */
static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op == CC_OP_DYNAMIC)
        return;
    gen_op_set_cc_op(s->cc_op);
    s->cc_op = CC_OP_DYNAMIC;
}
984

    
985
/* XXX: does not work with gdbstub "ice" single step - not a
986
   serious problem */
987
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
/* Emit the CX==0 early-exit test for a REP-prefixed string insn:
   fall through to l1 when (E/R)CX != 0, otherwise jump past the body
   to next_eip.  Returns label l2 (placed just before that jump) so
   the caller can branch back to re-test after each iteration. */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
999

    
1000
/* Generate one STOS iteration: store AL/AX/EAX/RAX to [ES:DI] and
   advance DI by the direction-flag increment. */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1017

    
1018
/* Generate one LODS iteration: load from [seg:SI] into the
   accumulator and advance SI by the direction-flag increment. */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1035

    
1036
/* Generate one SCAS iteration: compare the accumulator with [ES:DI]
   (setting the lazy condition codes) and advance DI. */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1054

    
1055
/* Generate one CMPS iteration: compare [seg:SI] with [ES:DI]
   (setting the lazy condition codes) and advance both SI and DI. */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1077

    
1078
/* Generate one INS iteration: read from the port in DX and store to
   [ES:DI], then advance DI.  A dummy 0 is stored first — presumably
   so a write fault on the destination page is taken before the port
   is actually read; NOTE(review): confirm against later QEMU. */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_in_DX_T0[ot]();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1097

    
1098
/* Generate one OUTS iteration: load from [seg:SI] and write it to
   the port in DX, then advance SI. */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_out_DX_T0[ot]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1115

    
1116
/* same method as Valgrind : we generate jumps to current or next
   instruction */
/* Expand a REP wrapper around string op 'op': execute the op once,
   decrement ECX, and loop by re-translating from cur_eip. */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
1133

    
1134
/* Expand a REPZ/REPNZ wrapper for flag-testing string ops (SCAS/CMPS):
   like GEN_REPZ, but also exit the loop when ZF disagrees with 'nz'. */
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);                                        \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
1151

    
1152
GEN_REPZ(movs)
1153
GEN_REPZ(stos)
1154
GEN_REPZ(lods)
1155
GEN_REPZ(ins)
1156
GEN_REPZ(outs)
1157
GEN_REPZ2(scas)
1158
GEN_REPZ2(cmps)
1159

    
1160
/* Base condition kinds for Jcc/SETcc, indexed by (opcode >> 1) & 7;
   the opcode's low bit inverts the condition. */
enum {
    JCC_O,   /* overflow */
    JCC_B,   /* below (carry) */
    JCC_Z,   /* zero */
    JCC_BE,  /* below or equal */
    JCC_S,   /* sign */
    JCC_P,   /* parity */
    JCC_L,   /* less (signed) */
    JCC_LE,  /* less or equal (signed) */
};
1170

    
1171
static GenOpFunc1 *gen_jcc_sub[4][8] = {
1172
    [OT_BYTE] = {
1173
        NULL,
1174
        gen_op_jb_subb,
1175
        gen_op_jz_subb,
1176
        gen_op_jbe_subb,
1177
        gen_op_js_subb,
1178
        NULL,
1179
        gen_op_jl_subb,
1180
        gen_op_jle_subb,
1181
    },
1182
    [OT_WORD] = {
1183
        NULL,
1184
        gen_op_jb_subw,
1185
        gen_op_jz_subw,
1186
        gen_op_jbe_subw,
1187
        gen_op_js_subw,
1188
        NULL,
1189
        gen_op_jl_subw,
1190
        gen_op_jle_subw,
1191
    },
1192
    [OT_LONG] = {
1193
        NULL,
1194
        gen_op_jb_subl,
1195
        gen_op_jz_subl,
1196
        gen_op_jbe_subl,
1197
        gen_op_js_subl,
1198
        NULL,
1199
        gen_op_jl_subl,
1200
        gen_op_jle_subl,
1201
    },
1202
#ifdef TARGET_X86_64
1203
    [OT_QUAD] = {
1204
        NULL,
1205
        BUGGY_64(gen_op_jb_subq),
1206
        gen_op_jz_subq,
1207
        BUGGY_64(gen_op_jbe_subq),
1208
        gen_op_js_subq,
1209
        NULL,
1210
        BUGGY_64(gen_op_jl_subq),
1211
        BUGGY_64(gen_op_jle_subq),
1212
    },
1213
#endif
1214
};
1215
static GenOpFunc1 *gen_op_loop[3][4] = {
1216
    [0] = {
1217
        gen_op_loopnzw,
1218
        gen_op_loopzw,
1219
        gen_op_jnz_ecxw,
1220
    },
1221
    [1] = {
1222
        gen_op_loopnzl,
1223
        gen_op_loopzl,
1224
        gen_op_jnz_ecxl,
1225
    },
1226
#ifdef TARGET_X86_64
1227
    [2] = {
1228
        gen_op_loopnzq,
1229
        gen_op_loopzq,
1230
        gen_op_jnz_ecxq,
1231
    },
1232
#endif
1233
};
1234

    
1235
static GenOpFunc *gen_setcc_slow[8] = {
1236
    gen_op_seto_T0_cc,
1237
    gen_op_setb_T0_cc,
1238
    gen_op_setz_T0_cc,
1239
    gen_op_setbe_T0_cc,
1240
    gen_op_sets_T0_cc,
1241
    gen_op_setp_T0_cc,
1242
    gen_op_setl_T0_cc,
1243
    gen_op_setle_T0_cc,
1244
};
1245

    
1246
static GenOpFunc *gen_setcc_sub[4][8] = {
1247
    [OT_BYTE] = {
1248
        NULL,
1249
        gen_op_setb_T0_subb,
1250
        gen_op_setz_T0_subb,
1251
        gen_op_setbe_T0_subb,
1252
        gen_op_sets_T0_subb,
1253
        NULL,
1254
        gen_op_setl_T0_subb,
1255
        gen_op_setle_T0_subb,
1256
    },
1257
    [OT_WORD] = {
1258
        NULL,
1259
        gen_op_setb_T0_subw,
1260
        gen_op_setz_T0_subw,
1261
        gen_op_setbe_T0_subw,
1262
        gen_op_sets_T0_subw,
1263
        NULL,
1264
        gen_op_setl_T0_subw,
1265
        gen_op_setle_T0_subw,
1266
    },
1267
    [OT_LONG] = {
1268
        NULL,
1269
        gen_op_setb_T0_subl,
1270
        gen_op_setz_T0_subl,
1271
        gen_op_setbe_T0_subl,
1272
        gen_op_sets_T0_subl,
1273
        NULL,
1274
        gen_op_setl_T0_subl,
1275
        gen_op_setle_T0_subl,
1276
    },
1277
#ifdef TARGET_X86_64
1278
    [OT_QUAD] = {
1279
        NULL,
1280
        gen_op_setb_T0_subq,
1281
        gen_op_setz_T0_subq,
1282
        gen_op_setbe_T0_subq,
1283
        gen_op_sets_T0_subq,
1284
        NULL,
1285
        gen_op_setl_T0_subq,
1286
        gen_op_setle_T0_subq,
1287
    },
1288
#endif
1289
};
1290

    
1291
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
1292
    gen_op_fadd_ST0_FT0,
1293
    gen_op_fmul_ST0_FT0,
1294
    gen_op_fcom_ST0_FT0,
1295
    gen_op_fcom_ST0_FT0,
1296
    gen_op_fsub_ST0_FT0,
1297
    gen_op_fsubr_ST0_FT0,
1298
    gen_op_fdiv_ST0_FT0,
1299
    gen_op_fdivr_ST0_FT0,
1300
};
1301

    
1302
/* NOTE the exception in "r" op ordering */
1303
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
1304
    gen_op_fadd_STN_ST0,
1305
    gen_op_fmul_STN_ST0,
1306
    NULL,
1307
    NULL,
1308
    gen_op_fsubr_STN_ST0,
1309
    gen_op_fsub_STN_ST0,
1310
    gen_op_fdivr_STN_ST0,
1311
    gen_op_fdiv_STN_ST0,
1312
};
1313

    
1314
/* if d == OR_TMP0, it means memory operand (address in A0) */
1315
static void gen_op(DisasContext *s1, int op, int ot, int d)
1316
{
1317
    GenOpFunc *gen_update_cc;
1318

    
1319
    if (d != OR_TMP0) {
1320
        gen_op_mov_TN_reg(ot, 0, d);
1321
    } else {
1322
        gen_op_ld_T0_A0(ot + s1->mem_index);
1323
    }
1324
    switch(op) {
1325
    case OP_ADCL:
1326
    case OP_SBBL:
1327
        if (s1->cc_op != CC_OP_DYNAMIC)
1328
            gen_op_set_cc_op(s1->cc_op);
1329
        if (d != OR_TMP0) {
1330
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
1331
            gen_op_mov_reg_T0(ot, d);
1332
        } else {
1333
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
1334
        }
1335
        s1->cc_op = CC_OP_DYNAMIC;
1336
        goto the_end;
1337
    case OP_ADDL:
1338
        gen_op_addl_T0_T1();
1339
        s1->cc_op = CC_OP_ADDB + ot;
1340
        gen_update_cc = gen_op_update2_cc;
1341
        break;
1342
    case OP_SUBL:
1343
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1344
        s1->cc_op = CC_OP_SUBB + ot;
1345
        gen_update_cc = gen_op_update2_cc;
1346
        break;
1347
    default:
1348
    case OP_ANDL:
1349
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1350
        s1->cc_op = CC_OP_LOGICB + ot;
1351
        gen_update_cc = gen_op_update1_cc;
1352
        break;
1353
    case OP_ORL:
1354
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1355
        s1->cc_op = CC_OP_LOGICB + ot;
1356
        gen_update_cc = gen_op_update1_cc;
1357
        break;
1358
    case OP_XORL:
1359
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1360
        s1->cc_op = CC_OP_LOGICB + ot;
1361
        gen_update_cc = gen_op_update1_cc;
1362
        break;
1363
    case OP_CMPL:
1364
        gen_op_cmpl_T0_T1_cc();
1365
        s1->cc_op = CC_OP_SUBB + ot;
1366
        gen_update_cc = NULL;
1367
        break;
1368
    }
1369
    if (op != OP_CMPL) {
1370
        if (d != OR_TMP0)
1371
            gen_op_mov_reg_T0(ot, d);
1372
        else
1373
            gen_op_st_T0_A0(ot + s1->mem_index);
1374
    }
1375
    /* the flags update must happen after the memory write (precise
1376
       exception support) */
1377
    if (gen_update_cc)
1378
        gen_update_cc();
1379
 the_end: ;
1380
}
1381

    
1382
/* if d == OR_TMP0, it means memory operand (address in A0) */
1383
static void gen_inc(DisasContext *s1, int ot, int d, int c)
1384
{
1385
    if (d != OR_TMP0)
1386
        gen_op_mov_TN_reg(ot, 0, d);
1387
    else
1388
        gen_op_ld_T0_A0(ot + s1->mem_index);
1389
    if (s1->cc_op != CC_OP_DYNAMIC)
1390
        gen_op_set_cc_op(s1->cc_op);
1391
    if (c > 0) {
1392
        gen_op_incl_T0();
1393
        s1->cc_op = CC_OP_INCB + ot;
1394
    } else {
1395
        gen_op_decl_T0();
1396
        s1->cc_op = CC_OP_DECB + ot;
1397
    }
1398
    if (d != OR_TMP0)
1399
        gen_op_mov_reg_T0(ot, d);
1400
    else
1401
        gen_op_st_T0_A0(ot + s1->mem_index);
1402
    gen_op_update_inc_cc();
1403
}
1404

    
1405
/* Emit shift/rotate 'op' of size 'ot' on destination 'd' with the count
   taken from register 's' (or T1 when s == OR_TMP1). */
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);
    /* for zero counts, flags are not updated, so must do it dynamically */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);

    if (d != OR_TMP0)
        gen_op_shift_T0_T1_cc[ot][op]();
    else
        gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1425

    
1426
/* Shift/rotate by an immediate count 'c': route through gen_shift with
   the count loaded into T1. */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
1432

    
1433
/* Decode the memory form of a ModRM byte (plus optional SIB and
   displacement) and emit code leaving the effective address in A0.
   Consumes the extra encoding bytes from s->pc.  *reg_ptr/*offset_ptr
   are always set to OR_A0 / 0 for the caller. */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            /* base == 4 selects a SIB byte */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base register: disp32 (RIP-relative in 64-bit
                   code without SIB) */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0) {
                    gen_op_addq_A0_im(disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* default segment: SS for EBP/ESP bases, DS otherwise */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        /* 16-bit addressing */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* disp16 with no base register */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        /* the eight 16-bit base/index combinations */
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* BP-based forms default to SS */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
1616

    
1617
/* Skip the SIB/displacement bytes of a memory-form ModRM operand
   without generating any code (used for multi-byte NOPs etc.). */
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
    if (mod == 3)
        return; /* register form: nothing follows */
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing */
        base = rm;

        if (base == 4) {
            /* consume the SIB byte */
            code = ldub_code(s->pc++);
            base = (code & 7);
        }

        switch (mod) {
        case 0:
            if (base == 5) {
                s->pc += 4; /* disp32 */
            }
            break;
        case 1:
            s->pc++;        /* disp8 */
            break;
        default:
        case 2:
            s->pc += 4;     /* disp32 */
            break;
        }
    } else {
        /* 16-bit addressing */
        switch (mod) {
        case 0:
            if (rm == 6) {
                s->pc += 2; /* disp16 */
            }
            break;
        case 1:
            s->pc++;        /* disp8 */
            break;
        default:
        case 2:
            s->pc += 2;     /* disp16 */
            break;
        }
    }
}
1666

    
1667
/* used for LEA and MOV AX, mem */
1668
static void gen_add_A0_ds_seg(DisasContext *s)
1669
{
1670
    int override, must_add_seg;
1671
    must_add_seg = s->addseg;
1672
    override = R_DS;
1673
    if (s->override >= 0) {
1674
        override = s->override;
1675
        must_add_seg = 1;
1676
    } else {
1677
        override = R_DS;
1678
    }
1679
    if (must_add_seg) {
1680
#ifdef TARGET_X86_64
1681
        if (CODE64(s)) {
1682
            gen_op_addq_A0_seg(override);
1683
        } else
1684
#endif
1685
        {
1686
            gen_op_addl_A0_seg(override);
1687
        }
1688
    }
1689
}
1690

    
1691
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1692
   OR_TMP0 */
1693
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1694
{
1695
    int mod, rm, opreg, disp;
1696

    
1697
    mod = (modrm >> 6) & 3;
1698
    rm = (modrm & 7) | REX_B(s);
1699
    if (mod == 3) {
1700
        if (is_store) {
1701
            if (reg != OR_TMP0)
1702
                gen_op_mov_TN_reg(ot, 0, reg);
1703
            gen_op_mov_reg_T0(ot, rm);
1704
        } else {
1705
            gen_op_mov_TN_reg(ot, 0, rm);
1706
            if (reg != OR_TMP0)
1707
                gen_op_mov_reg_T0(ot, reg);
1708
        }
1709
    } else {
1710
        gen_lea_modrm(s, modrm, &opreg, &disp);
1711
        if (is_store) {
1712
            if (reg != OR_TMP0)
1713
                gen_op_mov_TN_reg(ot, 0, reg);
1714
            gen_op_st_T0_A0(ot + s->mem_index);
1715
        } else {
1716
            gen_op_ld_T0_A0(ot + s->mem_index);
1717
            if (reg != OR_TMP0)
1718
                gen_op_mov_reg_T0(ot, reg);
1719
        }
1720
    }
1721
}
1722

    
1723
/* Fetch an immediate of size 'ot' from the instruction stream and
   advance s->pc past it. */
static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t ret;

    switch(ot) {
    case OT_BYTE:
        ret = ldub_code(s->pc);
        s->pc++;
        break;
    case OT_WORD:
        ret = lduw_code(s->pc);
        s->pc += 2;
        break;
    default:
    case OT_LONG:
        ret = ldl_code(s->pc);
        s->pc += 4;
        break;
    }
    return ret;
}
1744

    
1745
static inline int insn_const_size(unsigned int ot)
1746
{
1747
    if (ot <= OT_LONG)
1748
        return 1 << ot;
1749
    else
1750
        return 4;
1751
}
1752

    
1753
/* Jump to 'eip', using a direct TB link when the target stays within
   the pages this TB already spans, otherwise ending the block. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
1773

    
1774
/* Emit a conditional jump: taken -> 'val', not taken -> 'next_eip'.
   When TB chaining is allowed (s->jmp_opt) a fast flag test from
   gen_jcc_sub is used where possible; otherwise the condition is
   recomputed with the slow setcc path.
   Fixes vs. original: removed the unused local 'tb' (assigned s->tb but
   never read — gen_goto_tb fetches s->tb itself) and merged the
   byte-identical JCC_Z / JCC_S case bodies. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;              /* low opcode bit inverts the condition */
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            /* for these ops only ZF and SF match the SUB fast tests */
            switch(jcc_op) {
            case JCC_Z:
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            /* slow path: compute the condition into T0 */
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
1902

    
1903
/* Emit SETcc: compute condition 'b' into T0 (0 or 1), using a fast
   flag test from gen_setcc_sub when the current cc_op allows it.
   Fix vs. original: the byte-identical JCC_Z / JCC_S case bodies are
   merged into a single fallthrough case. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;              /* low opcode bit inverts the condition */
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        /* only ZF and SF match the SUB fast tests for these ops */
        switch(jcc_op) {
        case JCC_Z:
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        /* generic path: materialize flags, recompute the condition */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}
1969

    
1970
/* move T0 to seg_reg and compute if the CPU state may change. Never
1971
   call this function with seg_reg == R_CS */
1972
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
1973
{
1974
    if (s->pe && !s->vm86) {
1975
        /* XXX: optimize by finding processor state dynamically */
1976
        if (s->cc_op != CC_OP_DYNAMIC)
1977
            gen_op_set_cc_op(s->cc_op);
1978
        gen_jmp_im(cur_eip);
1979
        gen_op_movl_seg_T0(seg_reg);
1980
        /* abort translation because the addseg value may change or
1981
           because ss32 may change. For R_SS, translation must always
1982
           stop as a special handling must be done to disable hardware
1983
           interrupts for the next instruction */
1984
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
1985
            s->is_jmp = 3;
1986
    } else {
1987
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
1988
        if (seg_reg == R_SS)
1989
            s->is_jmp = 3;
1990
    }
1991
}
1992

    
1993
/* Load a 64-bit immediate into T1 for the SVM intercept helpers. */
#define SVM_movq_T1_im(x) gen_movtl_T1_im(x)
1994

    
1995
static inline int
1996
gen_svm_check_io(DisasContext *s, target_ulong pc_start, uint64_t type)
1997
{
1998
#if !defined(CONFIG_USER_ONLY)
1999
    if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
2000
        if (s->cc_op != CC_OP_DYNAMIC)
2001
            gen_op_set_cc_op(s->cc_op);
2002
        SVM_movq_T1_im(s->pc - s->cs_base);
2003
        gen_jmp_im(pc_start - s->cs_base);
2004
        gen_op_geneflags();
2005
        gen_op_svm_check_intercept_io((uint32_t)(type >> 32), (uint32_t)type);
2006
        s->cc_op = CC_OP_DYNAMIC;
2007
        /* FIXME: maybe we could move the io intercept vector to the TB as well
2008
                  so we know if this is an EOB or not ... let's assume it's not
2009
                  for now. */
2010
    }
2011
#endif
2012
    return 0;
2013
}
2014

    
2015
static inline int svm_is_rep(int prefixes)
2016
{
2017
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2018
}
2019

    
2020
static inline int
2021
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2022
                              uint64_t type, uint64_t param)
2023
{
2024
    if(!(s->flags & (INTERCEPT_SVM_MASK)))
2025
        /* no SVM activated */
2026
        return 0;
2027
    switch(type) {
2028
        /* CRx and DRx reads/writes */
2029
        case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
2030
            if (s->cc_op != CC_OP_DYNAMIC) {
2031
                gen_op_set_cc_op(s->cc_op);
2032
                s->cc_op = CC_OP_DYNAMIC;
2033
            }
2034
            gen_jmp_im(pc_start - s->cs_base);
2035
            SVM_movq_T1_im(param);
2036
            gen_op_geneflags();
2037
            gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
2038
            /* this is a special case as we do not know if the interception occurs
2039
               so we assume there was none */
2040
            return 0;
2041
        case SVM_EXIT_MSR:
2042
            if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
2043
                if (s->cc_op != CC_OP_DYNAMIC) {
2044
                    gen_op_set_cc_op(s->cc_op);
2045
                    s->cc_op = CC_OP_DYNAMIC;
2046
                }
2047
                gen_jmp_im(pc_start - s->cs_base);
2048
                SVM_movq_T1_im(param);
2049
                gen_op_geneflags();
2050
                gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
2051
                /* this is a special case as we do not know if the interception occurs
2052
                   so we assume there was none */
2053
                return 0;
2054
            }
2055
            break;
2056
        default:
2057
            if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
2058
                if (s->cc_op != CC_OP_DYNAMIC) {
2059
                    gen_op_set_cc_op(s->cc_op);
2060
                    s->cc_op = CC_OP_EFLAGS;
2061
                }
2062
                gen_jmp_im(pc_start - s->cs_base);
2063
                SVM_movq_T1_im(param);
2064
                gen_op_geneflags();
2065
                gen_op_svm_vmexit(type >> 32, type);
2066
                /* we can optimize this one so TBs don't get longer
2067
                   than up to vmexit */
2068
                gen_eob(s);
2069
                return 1;
2070
            }
2071
    }
2072
    return 0;
2073
}
2074

    
2075
static inline int
2076
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2077
{
2078
    return gen_svm_check_intercept_param(s, pc_start, type, 0);
2079
}
2080

    
2081
/* Add 'addend' to the stack pointer, using the width implied by the
   current code/stack size (64-bit, ss32, or 16-bit). */
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_addq_ESP_im(addend);
    } else
#endif
    if (s->ss32) {
        gen_op_addl_ESP_im(addend);
    } else {
        gen_op_addw_ESP_im(addend);
    }
}
2094

    
2095
/* generate a push. It depends on ss32, addseg and dflag */
2096
static void gen_push_T0(DisasContext *s)
2097
{
2098
#ifdef TARGET_X86_64
2099
    if (CODE64(s)) {
2100
        gen_op_movq_A0_reg(R_ESP);
2101
        if (s->dflag) {
2102
            gen_op_addq_A0_im(-8);
2103
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2104
        } else {
2105
            gen_op_addq_A0_im(-2);
2106
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
2107
        }
2108
        gen_op_mov_reg_A0(2, R_ESP);
2109
    } else
2110
#endif
2111
    {
2112
        gen_op_movl_A0_reg(R_ESP);
2113
        if (!s->dflag)
2114
            gen_op_addl_A0_im(-2);
2115
        else
2116
            gen_op_addl_A0_im(-4);
2117
        if (s->ss32) {
2118
            if (s->addseg) {
2119
                gen_op_movl_T1_A0();
2120
                gen_op_addl_A0_seg(R_SS);
2121
            }
2122
        } else {
2123
            gen_op_andl_A0_ffff();
2124
            gen_op_movl_T1_A0();
2125
            gen_op_addl_A0_seg(R_SS);
2126
        }
2127
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2128
        if (s->ss32 && !s->addseg)
2129
            gen_op_mov_reg_A0(1, R_ESP);
2130
        else
2131
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2132
    }
2133
}
2134

    
2135
/* generate a push. It depends on ss32, addseg and dflag */
2136
/* slower version for T1, only used for call Ev */
2137
static void gen_push_T1(DisasContext *s)
2138
{
2139
#ifdef TARGET_X86_64
2140
    if (CODE64(s)) {
2141
        gen_op_movq_A0_reg(R_ESP);
2142
        if (s->dflag) {
2143
            gen_op_addq_A0_im(-8);
2144
            gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2145
        } else {
2146
            gen_op_addq_A0_im(-2);
2147
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
2148
        }
2149
        gen_op_mov_reg_A0(2, R_ESP);
2150
    } else
2151
#endif
2152
    {
2153
        gen_op_movl_A0_reg(R_ESP);
2154
        if (!s->dflag)
2155
            gen_op_addl_A0_im(-2);
2156
        else
2157
            gen_op_addl_A0_im(-4);
2158
        if (s->ss32) {
2159
            if (s->addseg) {
2160
                gen_op_addl_A0_seg(R_SS);
2161
            }
2162
        } else {
2163
            gen_op_andl_A0_ffff();
2164
            gen_op_addl_A0_seg(R_SS);
2165
        }
2166
        gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2167

    
2168
        if (s->ss32 && !s->addseg)
2169
            gen_op_mov_reg_A0(1, R_ESP);
2170
        else
2171
            gen_stack_update(s, (-2) << s->dflag);
2172
    }
2173
}
2174

    
2175
/* two step pop is necessary for precise exceptions */
2176
static void gen_pop_T0(DisasContext *s)
2177
{
2178
#ifdef TARGET_X86_64
2179
    if (CODE64(s)) {
2180
        gen_op_movq_A0_reg(R_ESP);
2181
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2182
    } else
2183
#endif
2184
    {
2185
        gen_op_movl_A0_reg(R_ESP);
2186
        if (s->ss32) {
2187
            if (s->addseg)
2188
                gen_op_addl_A0_seg(R_SS);
2189
        } else {
2190
            gen_op_andl_A0_ffff();
2191
            gen_op_addl_A0_seg(R_SS);
2192
        }
2193
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2194
    }
2195
}
2196

    
2197
/* Second half of a pop: advance the stack pointer past the popped
   quantity (8 bytes in 64-bit mode, else 2 << dflag). */
static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
        return;
    }
#endif
    gen_stack_update(s, 2 << s->dflag);
}
2208

    
2209
/* Load A0 with the (optionally segment-adjusted) stack address and keep
   the raw stack offset in T1. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32) {
        gen_op_andl_A0_ffff();  /* 16-bit stack: wrap the offset */
    }
    gen_op_movl_T1_A0();
    if (s->addseg) {
        gen_op_addl_A0_seg(R_SS);
    }
}
2218

    
2219
/* NOTE: wrap around in 16 bit not fully handled */
2220
static void gen_pusha(DisasContext *s)
2221
{
2222
    int i;
2223
    gen_op_movl_A0_reg(R_ESP);
2224
    gen_op_addl_A0_im(-16 <<  s->dflag);
2225
    if (!s->ss32)
2226
        gen_op_andl_A0_ffff();
2227
    gen_op_movl_T1_A0();
2228
    if (s->addseg)
2229
        gen_op_addl_A0_seg(R_SS);
2230
    for(i = 0;i < 8; i++) {
2231
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2232
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2233
        gen_op_addl_A0_im(2 <<  s->dflag);
2234
    }
2235
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2236
}
2237

    
2238
/* NOTE: wrap around in 16 bit not fully handled */
2239
static void gen_popa(DisasContext *s)
2240
{
2241
    int i;
2242
    gen_op_movl_A0_reg(R_ESP);
2243
    if (!s->ss32)
2244
        gen_op_andl_A0_ffff();
2245
    gen_op_movl_T1_A0();
2246
    gen_op_addl_T1_im(16 <<  s->dflag);
2247
    if (s->addseg)
2248
        gen_op_addl_A0_seg(R_SS);
2249
    for(i = 0;i < 8; i++) {
2250
        /* ESP is not reloaded */
2251
        if (i != 3) {
2252
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2253
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2254
        }
2255
        gen_op_addl_A0_im(2 <<  s->dflag);
2256
    }
2257
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2258
}
2259

    
2260
/* Emit code for the ENTER instruction: push EBP, optionally copy
   'level' (0..31) nesting frame pointers, set EBP to the new frame and
   reserve 'esp_addend' extra bytes on the stack. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;  /* ENTER only honours the low 5 bits of the level */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        /* 64-bit mode: operand is 8 bytes, or 2 with a 66 prefix */
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();    /* T1 = new frame pointer value */

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* copy the nested frame pointers via a helper */
            gen_op_enter64_level(level, (ot == OT_QUAD));
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        /* final ESP: below the saved BP, the level copies and the
           requested local-variable area */
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        /* legacy mode: operand is 2 << dflag bytes */
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();  /* 16-bit stack: wrap the offset */
        gen_op_movl_T1_A0();        /* T1 = new frame pointer value */
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            gen_op_enter_level(level, s->dflag);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
2307

    
2308
/* Emit code raising exception 'trapno' at guest EIP 'cur_eip'. The
   translation block ends here (is_jmp = 3). */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);  /* flush lazy flag state first */
    }
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;
}
2316

    
2317
/* an interrupt is different from an exception because of the
2318
   privilege checks */
2319
static void gen_interrupt(DisasContext *s, int intno,
2320
                          target_ulong cur_eip, target_ulong next_eip)
2321
{
2322
    if (s->cc_op != CC_OP_DYNAMIC)
2323
        gen_op_set_cc_op(s->cc_op);
2324
    gen_jmp_im(cur_eip);
2325
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
2326
    s->is_jmp = 3;
2327
}
2328

    
2329
/* Emit code entering the debugger at guest EIP 'cur_eip'. Ends the TB. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);  /* flush lazy flag state first */
    }
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;
}
2337

    
2338
/* generate a generic end of block. Trace exception is also generated
2339
   if needed */
2340
static void gen_eob(DisasContext *s)
2341
{
2342
    if (s->cc_op != CC_OP_DYNAMIC)
2343
        gen_op_set_cc_op(s->cc_op);
2344
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
2345
        gen_op_reset_inhibit_irq();
2346
    }
2347
    if (s->singlestep_enabled) {
2348
        gen_op_debug();
2349
    } else if (s->tf) {
2350
        gen_op_single_step();
2351
    } else {
2352
        tcg_gen_exit_tb(0);
2353
    }
2354
    s->is_jmp = 3;
2355
}
2356

    
2357
/* generate a jump to eip. No segment change must happen before as a
2358
   direct call to the next block may occur */
2359
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2360
{
2361
    if (s->jmp_opt) {
2362
        if (s->cc_op != CC_OP_DYNAMIC) {
2363
            gen_op_set_cc_op(s->cc_op);
2364
            s->cc_op = CC_OP_DYNAMIC;
2365
        }
2366
        gen_goto_tb(s, tb_num, eip);
2367
        s->is_jmp = 3;
2368
    } else {
2369
        gen_jmp_im(eip);
2370
        gen_eob(s);
2371
    }
2372
}
2373

    
2374
/* Convenience wrapper: jump to 'eip' using TB slot 0. */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2378

    
2379
/* Load a 64-bit value at address A0 into the env field at the given
   offset. Indexed with s->mem_index >> 2 at the call sites in
   gen_sse(): entry 0 is the raw (user-only) access, entries 1/2 are the
   kernel/user softmmu variants when CONFIG_USER_ONLY is not set. */
static GenOpFunc1 *gen_ldq_env_A0[3] = {
    gen_op_ldq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldq_kernel_env_A0,
    gen_op_ldq_user_env_A0,
#endif
};
2386

    
2387
/* Store a 64-bit value from the env field at the given offset to the
   address in A0. Same s->mem_index >> 2 indexing as gen_ldq_env_A0. */
static GenOpFunc1 *gen_stq_env_A0[3] = {
    gen_op_stq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_stq_kernel_env_A0,
    gen_op_stq_user_env_A0,
#endif
};
2394

    
2395
/* Load a 128-bit (octo) value at address A0 into the env field at the
   given offset. Same s->mem_index >> 2 indexing as gen_ldq_env_A0. */
static GenOpFunc1 *gen_ldo_env_A0[3] = {
    gen_op_ldo_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldo_kernel_env_A0,
    gen_op_ldo_user_env_A0,
#endif
};
2402

    
2403
/* Store a 128-bit (octo) value from the env field at the given offset
   to the address in A0. Same s->mem_index >> 2 indexing as above. */
static GenOpFunc1 *gen_sto_env_A0[3] = {
    gen_op_sto_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_sto_kernel_env_A0,
    gen_op_sto_user_env_A0,
#endif
};
2410

    
2411
/* Sentinel table entries: SSE_SPECIAL marks opcodes decoded ad hoc in
   the big switch of gen_sse(); SSE_DUMMY marks opcodes that need no
   table dispatch (femms/emms are handled before the lookup is used). */
#define SSE_SPECIAL ((GenOpFunc2 *)1)
#define SSE_DUMMY ((GenOpFunc2 *)2)

/* Pair/quad initializers: MMX_OP2 yields the {mmx, xmm} variants of an
   op; SSE_FOP yields the {ps, pd, ss, sd} float variants. */
#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
2417

    
2418
/* Main 0x0F-prefixed SSE/MMX dispatch table, indexed by [opcode][b1]
   where b1 encodes the mandatory prefix as computed in gen_sse():
   0 = none (MMX / ps), 1 = 0x66 (pd), 2 = 0xF3 (ss), 3 = 0xF2 (sd).
   NULL entries are illegal encodings; see SSE_SPECIAL/SSE_DUMMY above. */
static GenOpFunc2 *sse_op_table1[256][4] = {
    /* 3DNow! extensions */
    [0x0e] = { SSE_DUMMY }, /* femms */
    [0x0f] = { SSE_DUMMY }, /* pf... */
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm }, /* unpcklps, unpcklpd */
    [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm }, /* unpckhps, unpckhpd */
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
    [0x2f] = { gen_op_comiss, gen_op_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
    [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
    [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
    [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
    [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
               gen_op_cvtss2sd, gen_op_cvtsd2ss },
    [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    /* cmpps etc.: the real predicate is picked via sse_op_table4 */
    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
    [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movdqu */
    [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
               (GenOpFunc2 *)gen_op_pshufd_xmm,
               (GenOpFunc2 *)gen_op_pshufhw_xmm,
               (GenOpFunc2 *)gen_op_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
    [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2543

    
2544
/* Immediate shift group (opcodes 0x71/0x72/0x73 marked SSE_SPECIAL
   above): rows of 8 for the word/dword/qword forms with the second
   index selecting mmx vs xmm. NOTE(review): presumably indexed by
   (opcode - 0x71) * 8 + ModRM.reg in gen_sse() — the lookup site is
   outside this chunk; verify against the caller. */
static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, gen_op_psrldq_xmm },   /* 128-bit only */
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, gen_op_pslldq_xmm },   /* 128-bit only */
};
2556

    
2557
/* Scalar int<->float conversions, in three groups of four:
   int-to-float, truncating float-to-int, rounding float-to-int. Within
   a group: 32-bit ss, 32-bit sd, then the 64-bit sq variants, which
   exist only on x86_64 builds (NULL otherwise via X86_64_ONLY). */
static GenOpFunc1 *sse_op_table3[4 * 3] = {
    gen_op_cvtsi2ss,
    gen_op_cvtsi2sd,
    X86_64_ONLY(gen_op_cvtsq2ss),
    X86_64_ONLY(gen_op_cvtsq2sd),

    gen_op_cvttss2si,
    gen_op_cvttsd2si,
    X86_64_ONLY(gen_op_cvttss2sq),
    X86_64_ONLY(gen_op_cvttsd2sq),

    gen_op_cvtss2si,
    gen_op_cvtsd2si,
    X86_64_ONLY(gen_op_cvtss2sq),
    X86_64_ONLY(gen_op_cvtsd2sq),
};
2573

    
2574
/* cmpps/cmppd/cmpss/cmpsd (0F C2) predicate table: the immediate byte
   0..7 selects the row, the mandatory prefix (b1) the column. */
static GenOpFunc2 *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2584

    
2585
/* 3DNow! operations, indexed by the instruction's trailing opcode
   suffix byte (0F 0F /r ib encoding). NULL entries are illegal. */
static GenOpFunc2 *sse_op_table5[256] = {
    [0x0c] = gen_op_pi2fw,
    [0x0d] = gen_op_pi2fd,
    [0x1c] = gen_op_pf2iw,
    [0x1d] = gen_op_pf2id,
    [0x8a] = gen_op_pfnacc,
    [0x8e] = gen_op_pfpnacc,
    [0x90] = gen_op_pfcmpge,
    [0x94] = gen_op_pfmin,
    [0x96] = gen_op_pfrcp,
    [0x97] = gen_op_pfrsqrt,
    [0x9a] = gen_op_pfsub,
    [0x9e] = gen_op_pfadd,
    [0xa0] = gen_op_pfcmpgt,
    [0xa4] = gen_op_pfmax,
    [0xa6] = gen_op_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = gen_op_movq, /* pfrsqit1 */
    [0xaa] = gen_op_pfsubr,
    [0xae] = gen_op_pfacc,
    [0xb0] = gen_op_pfcmpeq,
    [0xb4] = gen_op_pfmul,
    [0xb6] = gen_op_movq, /* pfrcpit2 */
    [0xb7] = gen_op_pmulhrw_mmx,
    [0xbb] = gen_op_pswapd,
    [0xbf] = gen_op_pavgb_mmx /* pavgusb */
};
2611

    
2612
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2613
{
2614
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
2615
    int modrm, mod, rm, reg, reg_addr, offset_addr;
2616
    GenOpFunc2 *sse_op2;
2617
    GenOpFunc3 *sse_op3;
2618

    
2619
    b &= 0xff;
2620
    if (s->prefix & PREFIX_DATA)
2621
        b1 = 1;
2622
    else if (s->prefix & PREFIX_REPZ)
2623
        b1 = 2;
2624
    else if (s->prefix & PREFIX_REPNZ)
2625
        b1 = 3;
2626
    else
2627
        b1 = 0;
2628
    sse_op2 = sse_op_table1[b][b1];
2629
    if (!sse_op2)
2630
        goto illegal_op;
2631
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
2632
        is_xmm = 1;
2633
    } else {
2634
        if (b1 == 0) {
2635
            /* MMX case */
2636
            is_xmm = 0;
2637
        } else {
2638
            is_xmm = 1;
2639
        }
2640
    }
2641
    /* simple MMX/SSE operation */
2642
    if (s->flags & HF_TS_MASK) {
2643
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2644
        return;
2645
    }
2646
    if (s->flags & HF_EM_MASK) {
2647
    illegal_op:
2648
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2649
        return;
2650
    }
2651
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2652
        goto illegal_op;
2653
    if (b == 0x0e) {
2654
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
2655
            goto illegal_op;
2656
        /* femms */
2657
        gen_op_emms();
2658
        return;
2659
    }
2660
    if (b == 0x77) {
2661
        /* emms */
2662
        gen_op_emms();
2663
        return;
2664
    }
2665
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2666
       the static cpu state) */
2667
    if (!is_xmm) {
2668
        gen_op_enter_mmx();
2669
    }
2670

    
2671
    modrm = ldub_code(s->pc++);
2672
    reg = ((modrm >> 3) & 7);
2673
    if (is_xmm)
2674
        reg |= rex_r;
2675
    mod = (modrm >> 6) & 3;
2676
    if (sse_op2 == SSE_SPECIAL) {
2677
        b |= (b1 << 8);
2678
        switch(b) {
2679
        case 0x0e7: /* movntq */
2680
            if (mod == 3)
2681
                goto illegal_op;
2682
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2683
            gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2684
            break;
2685
        case 0x1e7: /* movntdq */
2686
        case 0x02b: /* movntps */
2687
        case 0x12b: /* movntps */
2688
        case 0x3f0: /* lddqu */
2689
            if (mod == 3)
2690
                goto illegal_op;
2691
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2692
            gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2693
            break;
2694
        case 0x6e: /* movd mm, ea */
2695
#ifdef TARGET_X86_64
2696
            if (s->dflag == 2) {
2697
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2698
                gen_op_movq_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2699
            } else
2700
#endif
2701
            {
2702
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2703
                gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2704
            }
2705
            break;
2706
        case 0x16e: /* movd xmm, ea */
2707
#ifdef TARGET_X86_64
2708
            if (s->dflag == 2) {
2709
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2710
                gen_op_movq_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2711
            } else
2712
#endif
2713
            {
2714
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2715
                gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2716
            }
2717
            break;
2718
        case 0x6f: /* movq mm, ea */
2719
            if (mod != 3) {
2720
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2721
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2722
            } else {
2723
                rm = (modrm & 7);
2724
                gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
2725
                            offsetof(CPUX86State,fpregs[rm].mmx));
2726
            }
2727
            break;
2728
        case 0x010: /* movups */
2729
        case 0x110: /* movupd */
2730
        case 0x028: /* movaps */
2731
        case 0x128: /* movapd */
2732
        case 0x16f: /* movdqa xmm, ea */
2733
        case 0x26f: /* movdqu xmm, ea */
2734
            if (mod != 3) {
2735
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2736
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2737
            } else {
2738
                rm = (modrm & 7) | REX_B(s);
2739
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2740
                            offsetof(CPUX86State,xmm_regs[rm]));
2741
            }
2742
            break;
2743
        case 0x210: /* movss xmm, ea */
2744
            if (mod != 3) {
2745
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2746
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
2747
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2748
                gen_op_movl_T0_0();
2749
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2750
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2751
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2752
            } else {
2753
                rm = (modrm & 7) | REX_B(s);
2754
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2755
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2756
            }
2757
            break;
2758
        case 0x310: /* movsd xmm, ea */
2759
            if (mod != 3) {
2760
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2761
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2762
                gen_op_movl_T0_0();
2763
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2764
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2765
            } else {
2766
                rm = (modrm & 7) | REX_B(s);
2767
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2768
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2769
            }
2770
            break;
2771
        case 0x012: /* movlps */
2772
        case 0x112: /* movlpd */
2773
            if (mod != 3) {
2774
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2775
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2776
            } else {
2777
                /* movhlps */
2778
                rm = (modrm & 7) | REX_B(s);
2779
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2780
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2781
            }
2782
            break;
2783
        case 0x212: /* movsldup */
2784
            if (mod != 3) {
2785
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2786
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2787
            } else {
2788
                rm = (modrm & 7) | REX_B(s);
2789
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2790
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2791
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2792
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
2793
            }
2794
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2795
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2796
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2797
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2798
            break;
2799
        case 0x312: /* movddup */
2800
            if (mod != 3) {
2801
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2802
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2803
            } else {
2804
                rm = (modrm & 7) | REX_B(s);
2805
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2806
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2807
            }
2808
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2809
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2810
            break;
2811
        case 0x016: /* movhps */
2812
        case 0x116: /* movhpd */
2813
            if (mod != 3) {
2814
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2815
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2816
            } else {
2817
                /* movlhps */
2818
                rm = (modrm & 7) | REX_B(s);
2819
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2820
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2821
            }
2822
            break;
2823
        case 0x216: /* movshdup */
2824
            if (mod != 3) {
2825
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2826
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2827
            } else {
2828
                rm = (modrm & 7) | REX_B(s);
2829
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2830
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
2831
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2832
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
2833
            }
2834
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2835
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2836
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2837
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2838
            break;
2839
        case 0x7e: /* movd ea, mm */
2840
#ifdef TARGET_X86_64
2841
            if (s->dflag == 2) {
2842
                gen_op_movq_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2843
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2844
            } else
2845
#endif
2846
            {
2847
                gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2848
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2849
            }
2850
            break;
2851
        case 0x17e: /* movd ea, xmm */
2852
#ifdef TARGET_X86_64
2853
            if (s->dflag == 2) {
2854
                gen_op_movq_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2855
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2856
            } else
2857
#endif
2858
            {
2859
                gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2860
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2861
            }
2862
            break;
2863
        case 0x27e: /* movq xmm, ea */
2864
            if (mod != 3) {
2865
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2866
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2867
            } else {
2868
                rm = (modrm & 7) | REX_B(s);
2869
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2870
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2871
            }
2872
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2873
            break;
2874
        case 0x7f: /* movq ea, mm */
2875
            if (mod != 3) {
2876
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2877
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2878
            } else {
2879
                rm = (modrm & 7);
2880
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2881
                            offsetof(CPUX86State,fpregs[reg].mmx));
2882
            }
2883
            break;
2884
        case 0x011: /* movups */
2885
        case 0x111: /* movupd */
2886
        case 0x029: /* movaps */
2887
        case 0x129: /* movapd */
2888
        case 0x17f: /* movdqa ea, xmm */
2889
        case 0x27f: /* movdqu ea, xmm */
2890
            if (mod != 3) {
2891
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2892
                gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2893
            } else {
2894
                rm = (modrm & 7) | REX_B(s);
2895
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
2896
                            offsetof(CPUX86State,xmm_regs[reg]));
2897
            }
2898
            break;
2899
        case 0x211: /* movss ea, xmm */
2900
            if (mod != 3) {
2901
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2902
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2903
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
2904
            } else {
2905
                rm = (modrm & 7) | REX_B(s);
2906
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
2907
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2908
            }
2909
            break;
2910
        case 0x311: /* movsd ea, xmm */
2911
            if (mod != 3) {
2912
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2913
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2914
            } else {
2915
                rm = (modrm & 7) | REX_B(s);
2916
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2917
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2918
            }
2919
            break;
2920
        case 0x013: /* movlps */
2921
        case 0x113: /* movlpd */
2922
            if (mod != 3) {
2923
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2924
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2925
            } else {
2926
                goto illegal_op;
2927
            }
2928
            break;
2929
        case 0x017: /* movhps */
2930
        case 0x117: /* movhpd */
2931
            if (mod != 3) {
2932
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2933
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2934
            } else {
2935
                goto illegal_op;
2936
            }
2937
            break;
2938
        case 0x71: /* shift mm, im */
2939
        case 0x72:
2940
        case 0x73:
2941
        case 0x171: /* shift xmm, im */
2942
        case 0x172:
2943
        case 0x173:
2944
            val = ldub_code(s->pc++);
2945
            if (is_xmm) {
2946
                gen_op_movl_T0_im(val);
2947
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2948
                gen_op_movl_T0_0();
2949
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
2950
                op1_offset = offsetof(CPUX86State,xmm_t0);
2951
            } else {
2952
                gen_op_movl_T0_im(val);
2953
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
2954
                gen_op_movl_T0_0();
2955
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
2956
                op1_offset = offsetof(CPUX86State,mmx_t0);
2957
            }
2958
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
2959
            if (!sse_op2)
2960
                goto illegal_op;
2961
            if (is_xmm) {
2962
                rm = (modrm & 7) | REX_B(s);
2963
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2964
            } else {
2965
                rm = (modrm & 7);
2966
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2967
            }
2968
            sse_op2(op2_offset, op1_offset);
2969
            break;
2970
        case 0x050: /* movmskps */
2971
            rm = (modrm & 7) | REX_B(s);
2972
            gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
2973
            gen_op_mov_reg_T0(OT_LONG, reg);
2974
            break;
2975
        case 0x150: /* movmskpd */
2976
            rm = (modrm & 7) | REX_B(s);
2977
            gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
2978
            gen_op_mov_reg_T0(OT_LONG, reg);
2979
            break;
2980
        case 0x02a: /* cvtpi2ps */
2981
        case 0x12a: /* cvtpi2pd */
2982
            gen_op_enter_mmx();
2983
            if (mod != 3) {
2984
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2985
                op2_offset = offsetof(CPUX86State,mmx_t0);
2986
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
2987
            } else {
2988
                rm = (modrm & 7);
2989
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2990
            }
2991
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2992
            switch(b >> 8) {
2993
            case 0x0:
2994
                gen_op_cvtpi2ps(op1_offset, op2_offset);
2995
                break;
2996
            default:
2997
            case 0x1:
2998
                gen_op_cvtpi2pd(op1_offset, op2_offset);
2999
                break;
3000
            }
3001
            break;
3002
        case 0x22a: /* cvtsi2ss */
3003
        case 0x32a: /* cvtsi2sd */
3004
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3005
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3006
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3007
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
3008
            break;
3009
        case 0x02c: /* cvttps2pi */
3010
        case 0x12c: /* cvttpd2pi */
3011
        case 0x02d: /* cvtps2pi */
3012
        case 0x12d: /* cvtpd2pi */
3013
            gen_op_enter_mmx();
3014
            if (mod != 3) {
3015
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3016
                op2_offset = offsetof(CPUX86State,xmm_t0);
3017
                gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
3018
            } else {
3019
                rm = (modrm & 7) | REX_B(s);
3020
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3021
            }
3022
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3023
            switch(b) {
3024
            case 0x02c:
3025
                gen_op_cvttps2pi(op1_offset, op2_offset);
3026
                break;
3027
            case 0x12c:
3028
                gen_op_cvttpd2pi(op1_offset, op2_offset);
3029
                break;
3030
            case 0x02d:
3031
                gen_op_cvtps2pi(op1_offset, op2_offset);
3032
                break;
3033
            case 0x12d:
3034
                gen_op_cvtpd2pi(op1_offset, op2_offset);
3035
                break;
3036
            }
3037
            break;
3038
        case 0x22c: /* cvttss2si */
3039
        case 0x32c: /* cvttsd2si */
3040
        case 0x22d: /* cvtss2si */
3041
        case 0x32d: /* cvtsd2si */
3042
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3043
            if (mod != 3) {
3044
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3045
                if ((b >> 8) & 1) {
3046
                    gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3047
                } else {
3048
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3049
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3050
                }
3051
                op2_offset = offsetof(CPUX86State,xmm_t0);
3052
            } else {
3053
                rm = (modrm & 7) | REX_B(s);
3054
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3055
            }
3056
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3057
                          (b & 1) * 4](op2_offset);
3058
            gen_op_mov_reg_T0(ot, reg);
3059
            break;
3060
        case 0xc4: /* pinsrw */
3061
        case 0x1c4:
3062
            s->rip_offset = 1;
3063
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3064
            val = ldub_code(s->pc++);
3065
            if (b1) {
3066
                val &= 7;
3067
                gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
3068
            } else {
3069
                val &= 3;
3070
                gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
3071
            }
3072
            break;
3073
        case 0xc5: /* pextrw */
3074
        case 0x1c5:
3075
            if (mod != 3)
3076
                goto illegal_op;
3077
            val = ldub_code(s->pc++);
3078
            if (b1) {
3079
                val &= 7;
3080
                rm = (modrm & 7) | REX_B(s);
3081
                gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
3082
            } else {
3083
                val &= 3;
3084
                rm = (modrm & 7);
3085
                gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
3086
            }
3087
            reg = ((modrm >> 3) & 7) | rex_r;
3088
            gen_op_mov_reg_T0(OT_LONG, reg);
3089
            break;
3090
        case 0x1d6: /* movq ea, xmm */
3091
            if (mod != 3) {
3092
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3093
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3094
            } else {
3095
                rm = (modrm & 7) | REX_B(s);
3096
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3097
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3098
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3099
            }
3100
            break;
3101
        case 0x2d6: /* movq2dq */
3102
            gen_op_enter_mmx();
3103
            rm = (modrm & 7);
3104
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3105
                        offsetof(CPUX86State,fpregs[rm].mmx));
3106
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3107
            break;
3108
        case 0x3d6: /* movdq2q */
3109
            gen_op_enter_mmx();
3110
            rm = (modrm & 7) | REX_B(s);
3111
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3112
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3113
            break;
3114
        case 0xd7: /* pmovmskb */
3115
        case 0x1d7:
3116
            if (mod != 3)
3117
                goto illegal_op;
3118
            if (b1) {
3119
                rm = (modrm & 7) | REX_B(s);
3120
                gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
3121
            } else {
3122
                rm = (modrm & 7);
3123
                gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
3124
            }
3125
            reg = ((modrm >> 3) & 7) | rex_r;
3126
            gen_op_mov_reg_T0(OT_LONG, reg);
3127
            break;
3128
        default:
3129
            goto illegal_op;
3130
        }
3131
    } else {
3132
        /* generic MMX or SSE operation */
3133
        switch(b) {
3134
        case 0xf7:
3135
            /* maskmov : we must prepare A0 */
3136
            if (mod != 3)
3137
                goto illegal_op;
3138
#ifdef TARGET_X86_64
3139
            if (s->aflag == 2) {
3140
                gen_op_movq_A0_reg(R_EDI);
3141
            } else
3142
#endif
3143
            {
3144
                gen_op_movl_A0_reg(R_EDI);
3145
                if (s->aflag == 0)
3146
                    gen_op_andl_A0_ffff();
3147
            }
3148
            gen_add_A0_ds_seg(s);
3149
            break;
3150
        case 0x70: /* pshufx insn */
3151
        case 0xc6: /* pshufx insn */
3152
        case 0xc2: /* compare insns */
3153
            s->rip_offset = 1;
3154
            break;
3155
        default:
3156
            break;
3157
        }
3158
        if (is_xmm) {
3159
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3160
            if (mod != 3) {
3161
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3162
                op2_offset = offsetof(CPUX86State,xmm_t0);
3163
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3164
                                b == 0xc2)) {
3165
                    /* specific case for SSE single instructions */
3166
                    if (b1 == 2) {
3167
                        /* 32 bit access */
3168
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3169
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3170
                    } else {
3171
                        /* 64 bit access */
3172
                        gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3173
                    }
3174
                } else {
3175
                    gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
3176
                }
3177
            } else {
3178
                rm = (modrm & 7) | REX_B(s);
3179
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3180
            }
3181
        } else {
3182
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3183
            if (mod != 3) {
3184
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3185
                op2_offset = offsetof(CPUX86State,mmx_t0);
3186
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
3187
            } else {
3188
                rm = (modrm & 7);
3189
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3190
            }
3191
        }
3192
        switch(b) {
3193
        case 0x0f: /* 3DNow! data insns */
3194
            if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3195
                goto illegal_op;
3196
            val = ldub_code(s->pc++);
3197
            sse_op2 = sse_op_table5[val];
3198
            if (!sse_op2)
3199
                goto illegal_op;
3200
            sse_op2(op1_offset, op2_offset);
3201
            break;
3202
        case 0x70: /* pshufx insn */
3203
        case 0xc6: /* pshufx insn */
3204
            val = ldub_code(s->pc++);
3205
            sse_op3 = (GenOpFunc3 *)sse_op2;
3206
            sse_op3(op1_offset, op2_offset, val);
3207
            break;
3208
        case 0xc2:
3209
            /* compare insns */
3210
            val = ldub_code(s->pc++);
3211
            if (val >= 8)
3212
                goto illegal_op;
3213
            sse_op2 = sse_op_table4[val][b1];
3214
            sse_op2(op1_offset, op2_offset);
3215
            break;
3216
        default:
3217
            sse_op2(op1_offset, op2_offset);
3218
            break;
3219
        }
3220
        if (b == 0x2e || b == 0x2f) {
3221
            s->cc_op = CC_OP_EFLAGS;
3222
        }
3223
    }
3224
}
3225

    
3226

    
3227
/* convert one instruction. s->is_jmp is set if the translation must
3228
   be stopped. Return the next pc value */
3229
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3230
{
3231
    int b, prefixes, aflag, dflag;
3232
    int shift, ot;
3233
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3234
    target_ulong next_eip, tval;
3235
    int rex_w, rex_r;
3236

    
3237
    s->pc = pc_start;
3238
    prefixes = 0;
3239
    aflag = s->code32;
3240
    dflag = s->code32;
3241
    s->override = -1;
3242
    rex_w = -1;
3243
    rex_r = 0;
3244
#ifdef TARGET_X86_64
3245
    s->rex_x = 0;
3246
    s->rex_b = 0;
3247
    x86_64_hregs = 0;
3248
#endif
3249
    s->rip_offset = 0; /* for relative ip address */
3250
 next_byte:
3251
    b = ldub_code(s->pc);
3252
    s->pc++;
3253
    /* check prefixes */
3254
#ifdef TARGET_X86_64
3255
    if (CODE64(s)) {
3256
        switch (b) {
3257
        case 0xf3:
3258
            prefixes |= PREFIX_REPZ;
3259
            goto next_byte;
3260
        case 0xf2:
3261
            prefixes |= PREFIX_REPNZ;
3262
            goto next_byte;
3263
        case 0xf0:
3264
            prefixes |= PREFIX_LOCK;
3265
            goto next_byte;
3266
        case 0x2e:
3267
            s->override = R_CS;
3268
            goto next_byte;
3269
        case 0x36:
3270
            s->override = R_SS;
3271
            goto next_byte;
3272
        case 0x3e:
3273
            s->override = R_DS;
3274
            goto next_byte;
3275
        case 0x26:
3276
            s->override = R_ES;
3277
            goto next_byte;
3278
        case 0x64:
3279
            s->override = R_FS;
3280
            goto next_byte;
3281
        case 0x65:
3282
            s->override = R_GS;
3283
            goto next_byte;
3284
        case 0x66:
3285
            prefixes |= PREFIX_DATA;
3286
            goto next_byte;
3287
        case 0x67:
3288
            prefixes |= PREFIX_ADR;
3289
            goto next_byte;
3290
        case 0x40 ... 0x4f:
3291
            /* REX prefix */
3292
            rex_w = (b >> 3) & 1;
3293
            rex_r = (b & 0x4) << 1;
3294
            s->rex_x = (b & 0x2) << 2;
3295
            REX_B(s) = (b & 0x1) << 3;
3296
            x86_64_hregs = 1; /* select uniform byte register addressing */
3297
            goto next_byte;
3298
        }
3299
        if (rex_w == 1) {
3300
            /* 0x66 is ignored if rex.w is set */
3301
            dflag = 2;
3302
        } else {
3303
            if (prefixes & PREFIX_DATA)
3304
                dflag ^= 1;
3305
        }
3306
        if (!(prefixes & PREFIX_ADR))
3307
            aflag = 2;
3308
    } else
3309
#endif
3310
    {
3311
        switch (b) {
3312
        case 0xf3:
3313
            prefixes |= PREFIX_REPZ;
3314
            goto next_byte;
3315
        case 0xf2:
3316
            prefixes |= PREFIX_REPNZ;
3317
            goto next_byte;
3318
        case 0xf0:
3319
            prefixes |= PREFIX_LOCK;
3320
            goto next_byte;
3321
        case 0x2e:
3322
            s->override = R_CS;
3323
            goto next_byte;
3324
        case 0x36:
3325
            s->override = R_SS;
3326
            goto next_byte;
3327
        case 0x3e:
3328
            s->override = R_DS;
3329
            goto next_byte;
3330
        case 0x26:
3331
            s->override = R_ES;
3332
            goto next_byte;
3333
        case 0x64:
3334
            s->override = R_FS;
3335
            goto next_byte;
3336
        case 0x65:
3337
            s->override = R_GS;
3338
            goto next_byte;
3339
        case 0x66:
3340
            prefixes |= PREFIX_DATA;
3341
            goto next_byte;
3342
        case 0x67:
3343
            prefixes |= PREFIX_ADR;
3344
            goto next_byte;
3345
        }
3346
        if (prefixes & PREFIX_DATA)
3347
            dflag ^= 1;
3348
        if (prefixes & PREFIX_ADR)
3349
            aflag ^= 1;
3350
    }
3351

    
3352
    s->prefix = prefixes;
3353
    s->aflag = aflag;
3354
    s->dflag = dflag;
3355

    
3356
    /* lock generation */
3357
    if (prefixes & PREFIX_LOCK)
3358
        gen_op_lock();
3359

    
3360
    /* now check op code */
3361
 reswitch:
3362
    switch(b) {
3363
    case 0x0f:
3364
        /**************************/
3365
        /* extended op code */
3366
        b = ldub_code(s->pc++) | 0x100;
3367
        goto reswitch;
3368

    
3369
        /**************************/
3370
        /* arith & logic */
3371
    case 0x00 ... 0x05:
3372
    case 0x08 ... 0x0d:
3373
    case 0x10 ... 0x15:
3374
    case 0x18 ... 0x1d:
3375
    case 0x20 ... 0x25:
3376
    case 0x28 ... 0x2d:
3377
    case 0x30 ... 0x35:
3378
    case 0x38 ... 0x3d:
3379
        {
3380
            int op, f, val;
3381
            op = (b >> 3) & 7;
3382
            f = (b >> 1) & 3;
3383

    
3384
            if ((b & 1) == 0)
3385
                ot = OT_BYTE;
3386
            else
3387
                ot = dflag + OT_WORD;
3388

    
3389
            switch(f) {
3390
            case 0: /* OP Ev, Gv */
3391
                modrm = ldub_code(s->pc++);
3392
                reg = ((modrm >> 3) & 7) | rex_r;
3393
                mod = (modrm >> 6) & 3;
3394
                rm = (modrm & 7) | REX_B(s);
3395
                if (mod != 3) {
3396
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3397
                    opreg = OR_TMP0;
3398
                } else if (op == OP_XORL && rm == reg) {
3399
                xor_zero:
3400
                    /* xor reg, reg optimisation */
3401
                    gen_op_movl_T0_0();
3402
                    s->cc_op = CC_OP_LOGICB + ot;
3403
                    gen_op_mov_reg_T0(ot, reg);
3404
                    gen_op_update1_cc();
3405
                    break;
3406
                } else {
3407
                    opreg = rm;
3408
                }
3409
                gen_op_mov_TN_reg(ot, 1, reg);
3410
                gen_op(s, op, ot, opreg);
3411
                break;
3412
            case 1: /* OP Gv, Ev */
3413
                modrm = ldub_code(s->pc++);
3414
                mod = (modrm >> 6) & 3;
3415
                reg = ((modrm >> 3) & 7) | rex_r;
3416
                rm = (modrm & 7) | REX_B(s);
3417
                if (mod != 3) {
3418
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3419
                    gen_op_ld_T1_A0(ot + s->mem_index);
3420
                } else if (op == OP_XORL && rm == reg) {
3421
                    goto xor_zero;
3422
                } else {
3423
                    gen_op_mov_TN_reg(ot, 1, rm);
3424
                }
3425
                gen_op(s, op, ot, reg);
3426
                break;
3427
            case 2: /* OP A, Iv */
3428
                val = insn_get(s, ot);
3429
                gen_op_movl_T1_im(val);
3430
                gen_op(s, op, ot, OR_EAX);
3431
                break;
3432
            }
3433
        }
3434
        break;
3435

    
3436
    case 0x80: /* GRP1 */
3437
    case 0x81:
3438
    case 0x82:
3439
    case 0x83:
3440
        {
3441
            int val;
3442

    
3443
            if ((b & 1) == 0)
3444
                ot = OT_BYTE;
3445
            else
3446
                ot = dflag + OT_WORD;
3447

    
3448
            modrm = ldub_code(s->pc++);
3449
            mod = (modrm >> 6) & 3;
3450
            rm = (modrm & 7) | REX_B(s);
3451
            op = (modrm >> 3) & 7;
3452

    
3453
            if (mod != 3) {
3454
                if (b == 0x83)
3455
                    s->rip_offset = 1;
3456
                else
3457
                    s->rip_offset = insn_const_size(ot);
3458
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3459
                opreg = OR_TMP0;
3460
            } else {
3461
                opreg = rm;
3462
            }
3463

    
3464
            switch(b) {
3465
            default:
3466
            case 0x80:
3467
            case 0x81:
3468
            case 0x82:
3469
                val = insn_get(s, ot);
3470
                break;
3471
            case 0x83:
3472
                val = (int8_t)insn_get(s, OT_BYTE);
3473
                break;
3474
            }
3475
            gen_op_movl_T1_im(val);
3476
            gen_op(s, op, ot, opreg);
3477
        }
3478
        break;
3479

    
3480
        /**************************/
3481
        /* inc, dec, and other misc arith */
3482
    case 0x40 ... 0x47: /* inc Gv */
3483
        ot = dflag ? OT_LONG : OT_WORD;
3484
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
3485
        break;
3486
    case 0x48 ... 0x4f: /* dec Gv */
3487
        ot = dflag ? OT_LONG : OT_WORD;
3488
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
3489
        break;
3490
    case 0xf6: /* GRP3 */
3491
    case 0xf7:
3492
        if ((b & 1) == 0)
3493
            ot = OT_BYTE;
3494
        else
3495
            ot = dflag + OT_WORD;
3496

    
3497
        modrm = ldub_code(s->pc++);
3498
        mod = (modrm >> 6) & 3;
3499
        rm = (modrm & 7) | REX_B(s);
3500
        op = (modrm >> 3) & 7;
3501
        if (mod != 3) {
3502
            if (op == 0)
3503
                s->rip_offset = insn_const_size(ot);
3504
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3505
            gen_op_ld_T0_A0(ot + s->mem_index);
3506
        } else {
3507
            gen_op_mov_TN_reg(ot, 0, rm);
3508
        }
3509

    
3510
        switch(op) {
3511
        case 0: /* test */
3512
            val = insn_get(s, ot);
3513
            gen_op_movl_T1_im(val);
3514
            gen_op_testl_T0_T1_cc();
3515
            s->cc_op = CC_OP_LOGICB + ot;
3516
            break;
3517
        case 2: /* not */
3518
            gen_op_notl_T0();
3519
            if (mod != 3) {
3520
                gen_op_st_T0_A0(ot + s->mem_index);
3521
            } else {
3522
                gen_op_mov_reg_T0(ot, rm);
3523
            }
3524
            break;
3525
        case 3: /* neg */
3526
            gen_op_negl_T0();
3527
            if (mod != 3) {
3528
                gen_op_st_T0_A0(ot + s->mem_index);
3529
            } else {
3530
                gen_op_mov_reg_T0(ot, rm);
3531
            }
3532
            gen_op_update_neg_cc();
3533
            s->cc_op = CC_OP_SUBB + ot;
3534
            break;
3535
        case 4: /* mul */
3536
            switch(ot) {
3537
            case OT_BYTE:
3538
                gen_op_mulb_AL_T0();
3539
                s->cc_op = CC_OP_MULB;
3540
                break;
3541
            case OT_WORD:
3542
                gen_op_mulw_AX_T0();
3543
                s->cc_op = CC_OP_MULW;
3544
                break;
3545
            default:
3546
            case OT_LONG:
3547
                gen_op_mull_EAX_T0();
3548
                s->cc_op = CC_OP_MULL;
3549
                break;
3550
#ifdef TARGET_X86_64
3551
            case OT_QUAD:
3552
                gen_op_mulq_EAX_T0();
3553
                s->cc_op = CC_OP_MULQ;
3554
                break;
3555
#endif
3556
            }
3557
            break;
3558
        case 5: /* imul */
3559
            switch(ot) {
3560
            case OT_BYTE:
3561
                gen_op_imulb_AL_T0();
3562
                s->cc_op = CC_OP_MULB;
3563
                break;
3564
            case OT_WORD:
3565
                gen_op_imulw_AX_T0();
3566
                s->cc_op = CC_OP_MULW;
3567
                break;
3568
            default:
3569
            case OT_LONG:
3570
                gen_op_imull_EAX_T0();
3571
                s->cc_op = CC_OP_MULL;
3572
                break;
3573
#ifdef TARGET_X86_64
3574
            case OT_QUAD:
3575
                gen_op_imulq_EAX_T0();
3576
                s->cc_op = CC_OP_MULQ;
3577
                break;
3578
#endif
3579
            }
3580
            break;
3581
        case 6: /* div */
3582
            switch(ot) {
3583
            case OT_BYTE:
3584
                gen_jmp_im(pc_start - s->cs_base);
3585
                gen_op_divb_AL_T0();
3586
                break;
3587
            case OT_WORD:
3588
                gen_jmp_im(pc_start - s->cs_base);
3589
                gen_op_divw_AX_T0();
3590
                break;
3591
            default:
3592
            case OT_LONG:
3593
                gen_jmp_im(pc_start - s->cs_base);
3594
#ifdef MACRO_TEST
3595
                /* XXX: this is just a test */
3596
                tcg_gen_macro_2(cpu_T[0], cpu_T[0], MACRO_TEST);
3597
#else
3598
                tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
3599
#endif
3600
                break;
3601
#ifdef TARGET_X86_64
3602
            case OT_QUAD:
3603
                gen_jmp_im(pc_start - s->cs_base);
3604
                gen_op_divq_EAX_T0();
3605
                break;
3606
#endif
3607
            }
3608
            break;
3609
        case 7: /* idiv */
3610
            switch(ot) {
3611
            case OT_BYTE:
3612
                gen_jmp_im(pc_start - s->cs_base);
3613
                gen_op_idivb_AL_T0();
3614
                break;
3615
            case OT_WORD:
3616
                gen_jmp_im(pc_start - s->cs_base);
3617
                gen_op_idivw_AX_T0();
3618
                break;
3619
            default:
3620
            case OT_LONG:
3621
                gen_jmp_im(pc_start - s->cs_base);
3622
                tcg_gen_helper_0_1(helper_idivl_EAX_T0, cpu_T[0]);
3623
                break;
3624
#ifdef TARGET_X86_64
3625
            case OT_QUAD:
3626
                gen_jmp_im(pc_start - s->cs_base);
3627
                gen_op_idivq_EAX_T0();
3628
                break;
3629
#endif
3630
            }
3631
            break;
3632
        default:
3633
            goto illegal_op;
3634
        }
3635
        break;
3636

    
3637
    case 0xfe: /* GRP4 */
3638
    case 0xff: /* GRP5 */
3639
        if ((b & 1) == 0)
3640
            ot = OT_BYTE;
3641
        else
3642
            ot = dflag + OT_WORD;
3643

    
3644
        modrm = ldub_code(s->pc++);
3645
        mod = (modrm >> 6) & 3;
3646
        rm = (modrm & 7) | REX_B(s);
3647
        op = (modrm >> 3) & 7;
3648
        if (op >= 2 && b == 0xfe) {
3649
            goto illegal_op;
3650
        }
3651
        if (CODE64(s)) {
3652
            if (op == 2 || op == 4) {
3653
                /* operand size for jumps is 64 bit */
3654
                ot = OT_QUAD;
3655
            } else if (op == 3 || op == 5) {
3656
                /* for call calls, the operand is 16 or 32 bit, even
3657
                   in long mode */
3658
                ot = dflag ? OT_LONG : OT_WORD;
3659
            } else if (op == 6) {
3660
                /* default push size is 64 bit */
3661
                ot = dflag ? OT_QUAD : OT_WORD;
3662
            }
3663
        }
3664
        if (mod != 3) {
3665
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3666
            if (op >= 2 && op != 3 && op != 5)
3667
                gen_op_ld_T0_A0(ot + s->mem_index);
3668
        } else {
3669
            gen_op_mov_TN_reg(ot, 0, rm);
3670
        }
3671

    
3672
        switch(op) {
3673
        case 0: /* inc Ev */
3674
            if (mod != 3)
3675
                opreg = OR_TMP0;
3676
            else
3677
                opreg = rm;
3678
            gen_inc(s, ot, opreg, 1);
3679
            break;
3680
        case 1: /* dec Ev */
3681
            if (mod != 3)
3682
                opreg = OR_TMP0;
3683
            else
3684
                opreg = rm;
3685
            gen_inc(s, ot, opreg, -1);
3686
            break;
3687
        case 2: /* call Ev */
3688
            /* XXX: optimize if memory (no 'and' is necessary) */
3689
            if (s->dflag == 0)
3690
                gen_op_andl_T0_ffff();
3691
            next_eip = s->pc - s->cs_base;
3692
            gen_movtl_T1_im(next_eip);
3693
            gen_push_T1(s);
3694
            gen_op_jmp_T0();
3695
            gen_eob(s);
3696
            break;
3697
        case 3: /* lcall Ev */
3698
            gen_op_ld_T1_A0(ot + s->mem_index);
3699
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3700
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3701
        do_lcall:
3702
            if (s->pe && !s->vm86) {
3703
                if (s->cc_op != CC_OP_DYNAMIC)
3704
                    gen_op_set_cc_op(s->cc_op);
3705
                gen_jmp_im(pc_start - s->cs_base);
3706
                gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3707
            } else {
3708
                gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3709
            }
3710
            gen_eob(s);
3711
            break;
3712
        case 4: /* jmp Ev */
3713
            if (s->dflag == 0)
3714
                gen_op_andl_T0_ffff();
3715
            gen_op_jmp_T0();
3716
            gen_eob(s);
3717
            break;
3718
        case 5: /* ljmp Ev */
3719
            gen_op_ld_T1_A0(ot + s->mem_index);
3720
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3721
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3722
        do_ljmp:
3723
            if (s->pe && !s->vm86) {
3724
                if (s->cc_op != CC_OP_DYNAMIC)
3725
                    gen_op_set_cc_op(s->cc_op);
3726
                gen_jmp_im(pc_start - s->cs_base);
3727
                gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3728
            } else {
3729
                gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3730
                gen_op_movl_T0_T1();
3731
                gen_op_jmp_T0();
3732
            }
3733
            gen_eob(s);
3734
            break;
3735
        case 6: /* push Ev */
3736
            gen_push_T0(s);
3737
            break;
3738
        default:
3739
            goto illegal_op;
3740
        }
3741
        break;
3742

    
3743
    case 0x84: /* test Ev, Gv */
3744
    case 0x85:
3745
        if ((b & 1) == 0)
3746
            ot = OT_BYTE;
3747
        else
3748
            ot = dflag + OT_WORD;
3749

    
3750
        modrm = ldub_code(s->pc++);
3751
        mod = (modrm >> 6) & 3;
3752
        rm = (modrm & 7) | REX_B(s);
3753
        reg = ((modrm >> 3) & 7) | rex_r;
3754

    
3755
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3756
        gen_op_mov_TN_reg(ot, 1, reg);
3757
        gen_op_testl_T0_T1_cc();
3758
        s->cc_op = CC_OP_LOGICB + ot;
3759
        break;
3760

    
3761
    case 0xa8: /* test eAX, Iv */
3762
    case 0xa9:
3763
        if ((b & 1) == 0)
3764
            ot = OT_BYTE;
3765
        else
3766
            ot = dflag + OT_WORD;
3767
        val = insn_get(s, ot);
3768

    
3769
        gen_op_mov_TN_reg(ot, 0, OR_EAX);
3770
        gen_op_movl_T1_im(val);
3771
        gen_op_testl_T0_T1_cc();
3772
        s->cc_op = CC_OP_LOGICB + ot;
3773
        break;
3774

    
3775
    case 0x98: /* CWDE/CBW */
3776
#ifdef TARGET_X86_64
3777
        if (dflag == 2) {
3778
            gen_op_movslq_RAX_EAX();
3779
        } else
3780
#endif
3781
        if (dflag == 1)
3782
            gen_op_movswl_EAX_AX();
3783
        else
3784
            gen_op_movsbw_AX_AL();
3785
        break;
3786
    case 0x99: /* CDQ/CWD */
3787
#ifdef TARGET_X86_64
3788
        if (dflag == 2) {
3789
            gen_op_movsqo_RDX_RAX();
3790
        } else
3791
#endif
3792
        if (dflag == 1)
3793
            gen_op_movslq_EDX_EAX();
3794
        else
3795
            gen_op_movswl_DX_AX();
3796
        break;
3797
    case 0x1af: /* imul Gv, Ev */
3798
    case 0x69: /* imul Gv, Ev, I */
3799
    case 0x6b:
3800
        ot = dflag + OT_WORD;
3801
        modrm = ldub_code(s->pc++);
3802
        reg = ((modrm >> 3) & 7) | rex_r;
3803
        if (b == 0x69)
3804
            s->rip_offset = insn_const_size(ot);
3805
        else if (b == 0x6b)
3806
            s->rip_offset = 1;
3807
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3808
        if (b == 0x69) {
3809
            val = insn_get(s, ot);
3810
            gen_op_movl_T1_im(val);
3811
        } else if (b == 0x6b) {
3812
            val = (int8_t)insn_get(s, OT_BYTE);
3813
            gen_op_movl_T1_im(val);
3814
        } else {
3815
            gen_op_mov_TN_reg(ot, 1, reg);
3816
        }
3817

    
3818
#ifdef TARGET_X86_64
3819
        if (ot == OT_QUAD) {
3820
            gen_op_imulq_T0_T1();
3821
        } else
3822
#endif
3823
        if (ot == OT_LONG) {
3824
            gen_op_imull_T0_T1();
3825
        } else {
3826
            gen_op_imulw_T0_T1();
3827
        }
3828
        gen_op_mov_reg_T0(ot, reg);
3829
        s->cc_op = CC_OP_MULB + ot;
3830
        break;
3831
    case 0x1c0:
3832
    case 0x1c1: /* xadd Ev, Gv */
3833
        if ((b & 1) == 0)
3834
            ot = OT_BYTE;
3835
        else
3836
            ot = dflag + OT_WORD;
3837
        modrm = ldub_code(s->pc++);
3838
        reg = ((modrm >> 3) & 7) | rex_r;
3839
        mod = (modrm >> 6) & 3;
3840
        if (mod == 3) {
3841
            rm = (modrm & 7) | REX_B(s);
3842
            gen_op_mov_TN_reg(ot, 0, reg);
3843
            gen_op_mov_TN_reg(ot, 1, rm);
3844
            gen_op_addl_T0_T1();
3845
            gen_op_mov_reg_T1(ot, reg);
3846
            gen_op_mov_reg_T0(ot, rm);
3847
        } else {
3848
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3849
            gen_op_mov_TN_reg(ot, 0, reg);
3850
            gen_op_ld_T1_A0(ot + s->mem_index);
3851
            gen_op_addl_T0_T1();
3852
            gen_op_st_T0_A0(ot + s->mem_index);
3853
            gen_op_mov_reg_T1(ot, reg);
3854
        }
3855
        gen_op_update2_cc();
3856
        s->cc_op = CC_OP_ADDB + ot;
3857
        break;
3858
    case 0x1b0:
3859
    case 0x1b1: /* cmpxchg Ev, Gv */
3860
        if ((b & 1) == 0)
3861
            ot = OT_BYTE;
3862
        else
3863
            ot = dflag + OT_WORD;
3864
        modrm = ldub_code(s->pc++);
3865
        reg = ((modrm >> 3) & 7) | rex_r;
3866
        mod = (modrm >> 6) & 3;
3867
        gen_op_mov_TN_reg(ot, 1, reg);
3868
        if (mod == 3) {
3869
            rm = (modrm & 7) | REX_B(s);
3870
            gen_op_mov_TN_reg(ot, 0, rm);
3871
            gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3872
            gen_op_mov_reg_T0(ot, rm);
3873
        } else {
3874
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3875
            gen_op_ld_T0_A0(ot + s->mem_index);
3876
            gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3877
        }
3878
        s->cc_op = CC_OP_SUBB + ot;
3879
        break;
3880
    case 0x1c7: /* cmpxchg8b */
3881
        modrm = ldub_code(s->pc++);
3882
        mod = (modrm >> 6) & 3;
3883
        if ((mod == 3) || ((modrm & 0x38) != 0x8))
3884
            goto illegal_op;
3885
        gen_jmp_im(pc_start - s->cs_base);
3886
        if (s->cc_op != CC_OP_DYNAMIC)
3887
            gen_op_set_cc_op(s->cc_op);
3888
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3889
        gen_op_cmpxchg8b();
3890
        s->cc_op = CC_OP_EFLAGS;
3891
        break;
3892

    
3893
        /**************************/
3894
        /* push/pop */
3895
    case 0x50 ... 0x57: /* push */
3896
        gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
3897
        gen_push_T0(s);
3898
        break;
3899
    case 0x58 ... 0x5f: /* pop */
3900
        if (CODE64(s)) {
3901
            ot = dflag ? OT_QUAD : OT_WORD;
3902
        } else {
3903
            ot = dflag + OT_WORD;
3904
        }
3905
        gen_pop_T0(s);
3906
        /* NOTE: order is important for pop %sp */
3907
        gen_pop_update(s);
3908
        gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
3909
        break;
3910
    case 0x60: /* pusha */
3911
        if (CODE64(s))
3912
            goto illegal_op;
3913
        gen_pusha(s);
3914
        break;
3915
    case 0x61: /* popa */
3916
        if (CODE64(s))
3917
            goto illegal_op;
3918
        gen_popa(s);
3919
        break;
3920
    case 0x68: /* push Iv */
3921
    case 0x6a:
3922
        if (CODE64(s)) {
3923
            ot = dflag ? OT_QUAD : OT_WORD;
3924
        } else {
3925
            ot = dflag + OT_WORD;
3926
        }
3927
        if (b == 0x68)
3928
            val = insn_get(s, ot);
3929
        else
3930
            val = (int8_t)insn_get(s, OT_BYTE);
3931
        gen_op_movl_T0_im(val);
3932
        gen_push_T0(s);
3933
        break;
3934
    case 0x8f: /* pop Ev */
3935
        if (CODE64(s)) {
3936
            ot = dflag ? OT_QUAD : OT_WORD;
3937
        } else {
3938
            ot = dflag + OT_WORD;
3939
        }
3940
        modrm = ldub_code(s->pc++);
3941
        mod = (modrm >> 6) & 3;
3942
        gen_pop_T0(s);
3943
        if (mod == 3) {
3944
            /* NOTE: order is important for pop %sp */
3945
            gen_pop_update(s);
3946
            rm = (modrm & 7) | REX_B(s);
3947
            gen_op_mov_reg_T0(ot, rm);
3948
        } else {
3949
            /* NOTE: order is important too for MMU exceptions */
3950
            s->popl_esp_hack = 1 << ot;
3951
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3952
            s->popl_esp_hack = 0;
3953
            gen_pop_update(s);
3954
        }
3955
        break;
3956
    case 0xc8: /* enter */
3957
        {
3958
            int level;
3959
            val = lduw_code(s->pc);
3960
            s->pc += 2;
3961
            level = ldub_code(s->pc++);
3962
            gen_enter(s, val, level);
3963
        }
3964
        break;
3965
    case 0xc9: /* leave */
3966
        /* XXX: exception not precise (ESP is updated before potential exception) */
3967
        if (CODE64(s)) {
3968
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
3969
            gen_op_mov_reg_T0(OT_QUAD, R_ESP);
3970
        } else if (s->ss32) {
3971
            gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3972
            gen_op_mov_reg_T0(OT_LONG, R_ESP);
3973
        } else {
3974
            gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
3975
            gen_op_mov_reg_T0(OT_WORD, R_ESP);
3976
        }
3977
        gen_pop_T0(s);
3978
        if (CODE64(s)) {
3979
            ot = dflag ? OT_QUAD : OT_WORD;
3980
        } else {
3981
            ot = dflag + OT_WORD;
3982
        }
3983
        gen_op_mov_reg_T0(ot, R_EBP);
3984
        gen_pop_update(s);
3985
        break;
3986
    case 0x06: /* push es */
3987
    case 0x0e: /* push cs */
3988
    case 0x16: /* push ss */
3989
    case 0x1e: /* push ds */
3990
        if (CODE64(s))
3991
            goto illegal_op;
3992
        gen_op_movl_T0_seg(b >> 3);
3993
        gen_push_T0(s);
3994
        break;
3995
    case 0x1a0: /* push fs */
3996
    case 0x1a8: /* push gs */
3997
        gen_op_movl_T0_seg((b >> 3) & 7);
3998
        gen_push_T0(s);
3999
        break;
4000
    case 0x07: /* pop es */
4001
    case 0x17: /* pop ss */
4002
    case 0x1f: /* pop ds */
4003
        if (CODE64(s))
4004
            goto illegal_op;
4005
        reg = b >> 3;
4006
        gen_pop_T0(s);
4007
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4008
        gen_pop_update(s);
4009
        if (reg == R_SS) {
4010
            /* if reg == SS, inhibit interrupts/trace. */
4011
            /* If several instructions disable interrupts, only the
4012
               _first_ does it */
4013
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4014
                gen_op_set_inhibit_irq();
4015
            s->tf = 0;
4016
        }
4017
        if (s->is_jmp) {
4018
            gen_jmp_im(s->pc - s->cs_base);
4019
            gen_eob(s);
4020
        }
4021
        break;
4022
    case 0x1a1: /* pop fs */
4023
    case 0x1a9: /* pop gs */
4024
        gen_pop_T0(s);
4025
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4026
        gen_pop_update(s);
4027
        if (s->is_jmp) {
4028
            gen_jmp_im(s->pc - s->cs_base);
4029
            gen_eob(s);
4030
        }
4031
        break;
4032

    
4033
        /**************************/
4034
        /* mov */
4035
    case 0x88:
4036
    case 0x89: /* mov Gv, Ev */
4037
        if ((b & 1) == 0)
4038
            ot = OT_BYTE;
4039
        else
4040
            ot = dflag + OT_WORD;
4041
        modrm = ldub_code(s->pc++);
4042
        reg = ((modrm >> 3) & 7) | rex_r;
4043

    
4044
        /* generate a generic store */
4045
        gen_ldst_modrm(s, modrm, ot, reg, 1);
4046
        break;
4047
    case 0xc6:
4048
    case 0xc7: /* mov Ev, Iv */
4049
        if ((b & 1) == 0)
4050
            ot = OT_BYTE;
4051
        else
4052
            ot = dflag + OT_WORD;
4053
        modrm = ldub_code(s->pc++);
4054
        mod = (modrm >> 6) & 3;
4055
        if (mod != 3) {
4056
            s->rip_offset = insn_const_size(ot);
4057
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4058
        }
4059
        val = insn_get(s, ot);
4060
        gen_op_movl_T0_im(val);
4061
        if (mod != 3)
4062
            gen_op_st_T0_A0(ot + s->mem_index);
4063
        else
4064
            gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4065
        break;
4066
    case 0x8a:
4067
    case 0x8b: /* mov Ev, Gv */
4068
        if ((b & 1) == 0)
4069
            ot = OT_BYTE;
4070
        else
4071
            ot = OT_WORD + dflag;
4072
        modrm = ldub_code(s->pc++);
4073
        reg = ((modrm >> 3) & 7) | rex_r;
4074

    
4075
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4076
        gen_op_mov_reg_T0(ot, reg);
4077
        break;
4078
    case 0x8e: /* mov seg, Gv */
4079
        modrm = ldub_code(s->pc++);
4080
        reg = (modrm >> 3) & 7;
4081
        if (reg >= 6 || reg == R_CS)
4082
            goto illegal_op;
4083
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4084
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4085
        if (reg == R_SS) {
4086
            /* if reg == SS, inhibit interrupts/trace */
4087
            /* If several instructions disable interrupts, only the
4088
               _first_ does it */
4089
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4090
                gen_op_set_inhibit_irq();
4091
            s->tf = 0;
4092
        }
4093
        if (s->is_jmp) {
4094
            gen_jmp_im(s->pc - s->cs_base);
4095
            gen_eob(s);
4096
        }
4097
        break;
4098
    case 0x8c: /* mov Gv, seg */
4099
        modrm = ldub_code(s->pc++);
4100
        reg = (modrm >> 3) & 7;
4101
        mod = (modrm >> 6) & 3;
4102
        if (reg >= 6)
4103
            goto illegal_op;
4104
        gen_op_movl_T0_seg(reg);
4105
        if (mod == 3)
4106
            ot = OT_WORD + dflag;
4107
        else
4108
            ot = OT_WORD;
4109
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4110
        break;
4111

    
4112
    case 0x1b6: /* movzbS Gv, Eb */
4113
    case 0x1b7: /* movzwS Gv, Eb */
4114
    case 0x1be: /* movsbS Gv, Eb */
4115
    case 0x1bf: /* movswS Gv, Eb */
4116
        {
4117
            int d_ot;
4118
            /* d_ot is the size of destination */
4119
            d_ot = dflag + OT_WORD;
4120
            /* ot is the size of source */
4121
            ot = (b & 1) + OT_BYTE;
4122
            modrm = ldub_code(s->pc++);
4123
            reg = ((modrm >> 3) & 7) | rex_r;
4124
            mod = (modrm >> 6) & 3;
4125
            rm = (modrm & 7) | REX_B(s);
4126

    
4127
            if (mod == 3) {
4128
                gen_op_mov_TN_reg(ot, 0, rm);
4129
                switch(ot | (b & 8)) {
4130
                case OT_BYTE:
4131
                    gen_op_movzbl_T0_T0();
4132
                    break;
4133
                case OT_BYTE | 8:
4134
                    gen_op_movsbl_T0_T0();
4135
                    break;
4136
                case OT_WORD:
4137
                    gen_op_movzwl_T0_T0();
4138
                    break;
4139
                default:
4140
                case OT_WORD | 8:
4141
                    gen_op_movswl_T0_T0();
4142
                    break;
4143
                }
4144
                gen_op_mov_reg_T0(d_ot, reg);
4145
            } else {
4146
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4147
                if (b & 8) {
4148
                    gen_op_lds_T0_A0(ot + s->mem_index);
4149
                } else {
4150
                    gen_op_ldu_T0_A0(ot + s->mem_index);
4151
                }
4152
                gen_op_mov_reg_T0(d_ot, reg);
4153
            }
4154
        }
4155
        break;
4156

    
4157
    case 0x8d: /* lea */
4158
        ot = dflag + OT_WORD;
4159
        modrm = ldub_code(s->pc++);
4160
        mod = (modrm >> 6) & 3;
4161
        if (mod == 3)
4162
            goto illegal_op;
4163
        reg = ((modrm >> 3) & 7) | rex_r;
4164
        /* we must ensure that no segment is added */
4165
        s->override = -1;
4166
        val = s->addseg;
4167
        s->addseg = 0;
4168
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4169
        s->addseg = val;
4170
        gen_op_mov_reg_A0(ot - OT_WORD, reg);
4171
        break;
4172

    
4173
    case 0xa0: /* mov EAX, Ov */
4174
    case 0xa1:
4175
    case 0xa2: /* mov Ov, EAX */
4176
    case 0xa3:
4177
        {
4178
            target_ulong offset_addr;
4179

    
4180
            if ((b & 1) == 0)
4181
                ot = OT_BYTE;
4182
            else
4183
                ot = dflag + OT_WORD;
4184
#ifdef TARGET_X86_64
4185
            if (s->aflag == 2) {
4186
                offset_addr = ldq_code(s->pc);
4187
                s->pc += 8;
4188
                gen_op_movq_A0_im(offset_addr);
4189
            } else
4190
#endif
4191
            {
4192
                if (s->aflag) {
4193
                    offset_addr = insn_get(s, OT_LONG);
4194
                } else {
4195
                    offset_addr = insn_get(s, OT_WORD);
4196
                }
4197
                gen_op_movl_A0_im(offset_addr);
4198
            }
4199
            gen_add_A0_ds_seg(s);
4200
            if ((b & 2) == 0) {
4201
                gen_op_ld_T0_A0(ot + s->mem_index);
4202
                gen_op_mov_reg_T0(ot, R_EAX);
4203
            } else {
4204
                gen_op_mov_TN_reg(ot, 0, R_EAX);
4205
                gen_op_st_T0_A0(ot + s->mem_index);
4206
            }
4207
        }
4208
        break;
4209
    case 0xd7: /* xlat */
4210
#ifdef TARGET_X86_64
4211
        if (s->aflag == 2) {
4212
            gen_op_movq_A0_reg(R_EBX);
4213
            gen_op_addq_A0_AL();
4214
        } else
4215
#endif
4216
        {
4217
            gen_op_movl_A0_reg(R_EBX);
4218
            gen_op_addl_A0_AL();
4219
            if (s->aflag == 0)
4220
                gen_op_andl_A0_ffff();
4221
        }
4222
        gen_add_A0_ds_seg(s);
4223
        gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4224
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4225
        break;
4226
    case 0xb0 ... 0xb7: /* mov R, Ib */
4227
        val = insn_get(s, OT_BYTE);
4228
        gen_op_movl_T0_im(val);
4229
        gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4230
        break;
4231
    case 0xb8 ... 0xbf: /* mov R, Iv */
4232
#ifdef TARGET_X86_64
4233
        if (dflag == 2) {
4234
            uint64_t tmp;
4235
            /* 64 bit case */
4236
            tmp = ldq_code(s->pc);
4237
            s->pc += 8;
4238
            reg = (b & 7) | REX_B(s);
4239
            gen_movtl_T0_im(tmp);
4240
            gen_op_mov_reg_T0(OT_QUAD, reg);
4241
        } else
4242
#endif
4243
        {
4244
            ot = dflag ? OT_LONG : OT_WORD;
4245
            val = insn_get(s, ot);
4246
            reg = (b & 7) | REX_B(s);
4247
            gen_op_movl_T0_im(val);
4248
            gen_op_mov_reg_T0(ot, reg);
4249
        }
4250
        break;
4251

    
4252
    case 0x91 ... 0x97: /* xchg R, EAX */
4253
        ot = dflag + OT_WORD;
4254
        reg = (b & 7) | REX_B(s);
4255
        rm = R_EAX;
4256
        goto do_xchg_reg;
4257
    case 0x86:
4258
    case 0x87: /* xchg Ev, Gv */
4259
        if ((b & 1) == 0)
4260
            ot = OT_BYTE;
4261
        else
4262
            ot = dflag + OT_WORD;
4263
        modrm = ldub_code(s->pc++);
4264
        reg = ((modrm >> 3) & 7) | rex_r;
4265
        mod = (modrm >> 6) & 3;
4266
        if (mod == 3) {
4267
            rm = (modrm & 7) | REX_B(s);
4268
        do_xchg_reg:
4269
            gen_op_mov_TN_reg(ot, 0, reg);
4270
            gen_op_mov_TN_reg(ot, 1, rm);
4271
            gen_op_mov_reg_T0(ot, rm);
4272
            gen_op_mov_reg_T1(ot, reg);
4273
        } else {
4274
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4275
            gen_op_mov_TN_reg(ot, 0, reg);
4276
            /* for xchg, lock is implicit */
4277
            if (!(prefixes & PREFIX_LOCK))
4278
                gen_op_lock();
4279
            gen_op_ld_T1_A0(ot + s->mem_index);
4280
            gen_op_st_T0_A0(ot + s->mem_index);
4281
            if (!(prefixes & PREFIX_LOCK))
4282
                gen_op_unlock();
4283
            gen_op_mov_reg_T1(ot, reg);
4284
        }
4285
        break;
4286
    case 0xc4: /* les Gv */
4287
        if (CODE64(s))
4288
            goto illegal_op;
4289
        op = R_ES;
4290
        goto do_lxx;
4291
    case 0xc5: /* lds Gv */
4292
        if (CODE64(s))
4293
            goto illegal_op;
4294
        op = R_DS;
4295
        goto do_lxx;
4296
    case 0x1b2: /* lss Gv */
4297
        op = R_SS;
4298
        goto do_lxx;
4299
    case 0x1b4: /* lfs Gv */
4300
        op = R_FS;
4301
        goto do_lxx;
4302
    case 0x1b5: /* lgs Gv */
4303
        op = R_GS;
4304
    do_lxx:
4305
        ot = dflag ? OT_LONG : OT_WORD;
4306
        modrm = ldub_code(s->pc++);
4307
        reg = ((modrm >> 3) & 7) | rex_r;
4308
        mod = (modrm >> 6) & 3;
4309
        if (mod == 3)
4310
            goto illegal_op;
4311
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4312
        gen_op_ld_T1_A0(ot + s->mem_index);
4313
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4314
        /* load the segment first to handle exceptions properly */
4315
        gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4316
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4317
        /* then put the data */
4318
        gen_op_mov_reg_T1(ot, reg);
4319
        if (s->is_jmp) {
4320
            gen_jmp_im(s->pc - s->cs_base);
4321
            gen_eob(s);
4322
        }
4323
        break;
4324

    
4325
        /************************/
4326
        /* shifts */
4327
    case 0xc0:
4328
    case 0xc1:
4329
        /* shift Ev,Ib */
4330
        shift = 2;
4331
    grp2:
4332
        {
4333
            if ((b & 1) == 0)
4334
                ot = OT_BYTE;
4335
            else
4336
                ot = dflag + OT_WORD;
4337

    
4338
            modrm = ldub_code(s->pc++);
4339
            mod = (modrm >> 6) & 3;
4340
            op = (modrm >> 3) & 7;
4341

    
4342
            if (mod != 3) {
4343
                if (shift == 2) {
4344
                    s->rip_offset = 1;
4345
                }
4346
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4347
                opreg = OR_TMP0;
4348
            } else {
4349
                opreg = (modrm & 7) | REX_B(s);
4350
            }
4351

    
4352
            /* simpler op */
4353
            if (shift == 0) {
4354
                gen_shift(s, op, ot, opreg, OR_ECX);
4355
            } else {
4356
                if (shift == 2) {
4357
                    shift = ldub_code(s->pc++);
4358
                }
4359
                gen_shifti(s, op, ot, opreg, shift);
4360
            }
4361
        }
4362
        break;
4363
    case 0xd0:
4364
    case 0xd1:
4365
        /* shift Ev,1 */
4366
        shift = 1;
4367
        goto grp2;
4368
    case 0xd2:
4369
    case 0xd3:
4370
        /* shift Ev,cl */
4371
        shift = 0;
4372
        goto grp2;
4373

    
4374
    case 0x1a4: /* shld imm */
4375
        op = 0;
4376
        shift = 1;
4377
        goto do_shiftd;
4378
    case 0x1a5: /* shld cl */
4379
        op = 0;
4380
        shift = 0;
4381
        goto do_shiftd;
4382
    case 0x1ac: /* shrd imm */
4383
        op = 1;
4384
        shift = 1;
4385
        goto do_shiftd;
4386
    case 0x1ad: /* shrd cl */
4387
        op = 1;
4388
        shift = 0;
4389
    do_shiftd:
4390
        ot = dflag + OT_WORD;
4391
        modrm = ldub_code(s->pc++);
4392
        mod = (modrm >> 6) & 3;
4393
        rm = (modrm & 7) | REX_B(s);
4394
        reg = ((modrm >> 3) & 7) | rex_r;
4395

    
4396
        if (mod != 3) {
4397
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4398
            gen_op_ld_T0_A0(ot + s->mem_index);
4399
        } else {
4400
            gen_op_mov_TN_reg(ot, 0, rm);
4401
        }
4402
        gen_op_mov_TN_reg(ot, 1, reg);
4403

    
4404
        if (shift) {
4405
            val = ldub_code(s->pc++);
4406
            if (ot == OT_QUAD)
4407
                val &= 0x3f;
4408
            else
4409
                val &= 0x1f;
4410
            if (val) {
4411
                if (mod == 3)
4412
                    gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4413
                else
4414
                    gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4415
                if (op == 0 && ot != OT_WORD)
4416
                    s->cc_op = CC_OP_SHLB + ot;
4417
                else
4418
                    s->cc_op = CC_OP_SARB + ot;
4419
            }
4420
        } else {
4421
            if (s->cc_op != CC_OP_DYNAMIC)
4422
                gen_op_set_cc_op(s->cc_op);
4423
            if (mod == 3)
4424
                gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4425
            else
4426
                gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4427
            s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4428
        }
4429
        if (mod == 3) {
4430
            gen_op_mov_reg_T0(ot, rm);
4431
        }
4432
        break;
4433

    
4434
        /************************/
4435
        /* floats */
4436
    case 0xd8 ... 0xdf:
4437
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4438
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4439
            /* XXX: what to do if illegal op ? */
4440
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4441
            break;
4442
        }
4443
        modrm = ldub_code(s->pc++);
4444
        mod = (modrm >> 6) & 3;
4445
        rm = modrm & 7;
4446
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4447
        if (mod != 3) {
4448
            /* memory op */
4449
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4450
            switch(op) {
4451
            case 0x00 ... 0x07: /* fxxxs */
4452
            case 0x10 ... 0x17: /* fixxxl */
4453
            case 0x20 ... 0x27: /* fxxxl */
4454
            case 0x30 ... 0x37: /* fixxx */
4455
                {
4456
                    int op1;
4457
                    op1 = op & 7;
4458

    
4459
                    switch(op >> 4) {
4460
                    case 0:
4461
                        gen_op_flds_FT0_A0();
4462
                        break;
4463
                    case 1:
4464
                        gen_op_fildl_FT0_A0();
4465
                        break;
4466
                    case 2:
4467
                        gen_op_fldl_FT0_A0();
4468
                        break;
4469
                    case 3:
4470
                    default:
4471
                        gen_op_fild_FT0_A0();
4472
                        break;
4473
                    }
4474

    
4475
                    gen_op_fp_arith_ST0_FT0[op1]();
4476
                    if (op1 == 3) {
4477
                        /* fcomp needs pop */
4478
                        gen_op_fpop();
4479
                    }
4480
                }
4481
                break;
4482
            case 0x08: /* flds */
4483
            case 0x0a: /* fsts */
4484
            case 0x0b: /* fstps */
4485
            case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4486
            case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4487
            case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4488
                switch(op & 7) {
4489
                case 0:
4490
                    switch(op >> 4) {
4491
                    case 0:
4492
                        gen_op_flds_ST0_A0();
4493
                        break;
4494
                    case 1:
4495
                        gen_op_fildl_ST0_A0();
4496
                        break;
4497
                    case 2:
4498
                        gen_op_fldl_ST0_A0();
4499
                        break;
4500
                    case 3:
4501
                    default:
4502
                        gen_op_fild_ST0_A0();
4503
                        break;
4504
                    }
4505
                    break;
4506
                case 1:
4507
                    switch(op >> 4) {
4508
                    case 1:
4509
                        gen_op_fisttl_ST0_A0();
4510
                        break;
4511
                    case 2:
4512
                        gen_op_fisttll_ST0_A0();
4513
                        break;
4514
                    case 3:
4515
                    default:
4516
                        gen_op_fistt_ST0_A0();
4517
                    }
4518
                    gen_op_fpop();
4519
                    break;
4520
                default:
4521
                    switch(op >> 4) {
4522
                    case 0:
4523
                        gen_op_fsts_ST0_A0();
4524
                        break;
4525
                    case 1:
4526
                        gen_op_fistl_ST0_A0();
4527
                        break;
4528
                    case 2:
4529
                        gen_op_fstl_ST0_A0();
4530
                        break;
4531
                    case 3:
4532
                    default:
4533
                        gen_op_fist_ST0_A0();
4534
                        break;
4535
                    }
4536
                    if ((op & 7) == 3)
4537
                        gen_op_fpop();
4538
                    break;
4539
                }
4540
                break;
4541
            case 0x0c: /* fldenv mem */
4542
                gen_op_fldenv_A0(s->dflag);
4543
                break;
4544
            case 0x0d: /* fldcw mem */
4545
                gen_op_fldcw_A0();
4546
                break;
4547
            case 0x0e: /* fnstenv mem */
4548
                gen_op_fnstenv_A0(s->dflag);
4549
                break;
4550
            case 0x0f: /* fnstcw mem */
4551
                gen_op_fnstcw_A0();
4552
                break;
4553
            case 0x1d: /* fldt mem */
4554
                gen_op_fldt_ST0_A0();
4555
                break;
4556
            case 0x1f: /* fstpt mem */
4557
                gen_op_fstt_ST0_A0();
4558
                gen_op_fpop();
4559
                break;
4560
            case 0x2c: /* frstor mem */
4561
                gen_op_frstor_A0(s->dflag);
4562
                break;
4563
            case 0x2e: /* fnsave mem */
4564
                gen_op_fnsave_A0(s->dflag);
4565
                break;
4566
            case 0x2f: /* fnstsw mem */
4567
                gen_op_fnstsw_A0();
4568
                break;
4569
            case 0x3c: /* fbld */
4570
                gen_op_fbld_ST0_A0();
4571
                break;
4572
            case 0x3e: /* fbstp */
4573
                gen_op_fbst_ST0_A0();
4574
                gen_op_fpop();
4575
                break;
4576
            case 0x3d: /* fildll */
4577
                gen_op_fildll_ST0_A0();
4578
                break;
4579
            case 0x3f: /* fistpll */
4580
                gen_op_fistll_ST0_A0();
4581
                gen_op_fpop();
4582
                break;
4583
            default:
4584
                goto illegal_op;
4585
            }
4586
        } else {
4587
            /* register float ops */
4588
            opreg = rm;
4589

    
4590
            switch(op) {
4591
            case 0x08: /* fld sti */
4592
                gen_op_fpush();
4593
                gen_op_fmov_ST0_STN((opreg + 1) & 7);
4594
                break;
4595
            case 0x09: /* fxchg sti */
4596
            case 0x29: /* fxchg4 sti, undocumented op */
4597
            case 0x39: /* fxchg7 sti, undocumented op */
4598
                gen_op_fxchg_ST0_STN(opreg);
4599
                break;
4600
            case 0x0a: /* grp d9/2 */
4601
                switch(rm) {
4602
                case 0: /* fnop */
4603
                    /* check exceptions (FreeBSD FPU probe) */
4604
                    if (s->cc_op != CC_OP_DYNAMIC)
4605
                        gen_op_set_cc_op(s->cc_op);
4606
                    gen_jmp_im(pc_start - s->cs_base);
4607
                    gen_op_fwait();
4608
                    break;
4609
                default:
4610
                    goto illegal_op;
4611
                }
4612
                break;
4613
            case 0x0c: /* grp d9/4 */
4614
                switch(rm) {
4615
                case 0: /* fchs */
4616
                    gen_op_fchs_ST0();
4617
                    break;
4618
                case 1: /* fabs */
4619
                    gen_op_fabs_ST0();
4620
                    break;
4621
                case 4: /* ftst */
4622
                    gen_op_fldz_FT0();
4623
                    gen_op_fcom_ST0_FT0();
4624
                    break;
4625
                case 5: /* fxam */
4626
                    gen_op_fxam_ST0();
4627
                    break;
4628
                default:
4629
                    goto illegal_op;
4630
                }
4631
                break;
4632
            case 0x0d: /* grp d9/5 */
4633
                {
4634
                    switch(rm) {
4635
                    case 0:
4636
                        gen_op_fpush();
4637
                        gen_op_fld1_ST0();
4638
                        break;
4639
                    case 1:
4640
                        gen_op_fpush();
4641
                        gen_op_fldl2t_ST0();
4642
                        break;
4643
                    case 2:
4644
                        gen_op_fpush();
4645
                        gen_op_fldl2e_ST0();
4646
                        break;
4647
                    case 3:
4648
                        gen_op_fpush();
4649
                        gen_op_fldpi_ST0();
4650
                        break;
4651
                    case 4:
4652
                        gen_op_fpush();
4653
                        gen_op_fldlg2_ST0();
4654
                        break;
4655
                    case 5:
4656
                        gen_op_fpush();
4657
                        gen_op_fldln2_ST0();
4658
                        break;
4659
                    case 6:
4660
                        gen_op_fpush();
4661
                        gen_op_fldz_ST0();
4662
                        break;
4663
                    default:
4664
                        goto illegal_op;
4665
                    }
4666
                }
4667
                break;
4668
            case 0x0e: /* grp d9/6 */
4669
                switch(rm) {
4670
                case 0: /* f2xm1 */
4671
                    gen_op_f2xm1();
4672
                    break;
4673
                case 1: /* fyl2x */
4674
                    gen_op_fyl2x();
4675
                    break;
4676
                case 2: /* fptan */
4677
                    gen_op_fptan();
4678
                    break;
4679
                case 3: /* fpatan */
4680
                    gen_op_fpatan();
4681
                    break;
4682
                case 4: /* fxtract */
4683
                    gen_op_fxtract();
4684
                    break;
4685
                case 5: /* fprem1 */
4686
                    gen_op_fprem1();
4687
                    break;
4688
                case 6: /* fdecstp */
4689
                    gen_op_fdecstp();
4690
                    break;
4691
                default:
4692
                case 7: /* fincstp */
4693
                    gen_op_fincstp();
4694
                    break;
4695
                }
4696
                break;
4697
            case 0x0f: /* grp d9/7 */
4698
                switch(rm) {
4699
                case 0: /* fprem */
4700
                    gen_op_fprem();
4701
                    break;
4702
                case 1: /* fyl2xp1 */
4703
                    gen_op_fyl2xp1();
4704
                    break;
4705
                case 2: /* fsqrt */
4706
                    gen_op_fsqrt();
4707
                    break;
4708
                case 3: /* fsincos */
4709
                    gen_op_fsincos();
4710
                    break;
4711
                case 5: /* fscale */
4712
                    gen_op_fscale();
4713
                    break;
4714
                case 4: /* frndint */
4715
                    gen_op_frndint();
4716
                    break;
4717
                case 6: /* fsin */
4718
                    gen_op_fsin();
4719
                    break;
4720
                default:
4721
                case 7: /* fcos */
4722
                    gen_op_fcos();
4723
                    break;
4724
                }
4725
                break;
4726
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4727
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4728
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4729
                {
4730
                    int op1;
4731

    
4732
                    op1 = op & 7;
4733
                    if (op >= 0x20) {
4734
                        gen_op_fp_arith_STN_ST0[op1](opreg);
4735
                        if (op >= 0x30)
4736
                            gen_op_fpop();
4737
                    } else {
4738
                        gen_op_fmov_FT0_STN(opreg);
4739
                        gen_op_fp_arith_ST0_FT0[op1]();
4740
                    }
4741
                }
4742
                break;
4743
            case 0x02: /* fcom */
4744
            case 0x22: /* fcom2, undocumented op */
4745
                gen_op_fmov_FT0_STN(opreg);
4746
                gen_op_fcom_ST0_FT0();
4747
                break;
4748
            case 0x03: /* fcomp */
4749
            case 0x23: /* fcomp3, undocumented op */
4750
            case 0x32: /* fcomp5, undocumented op */
4751
                gen_op_fmov_FT0_STN(opreg);
4752
                gen_op_fcom_ST0_FT0();
4753
                gen_op_fpop();
4754
                break;
4755
            case 0x15: /* da/5 */
4756
                switch(rm) {
4757
                case 1: /* fucompp */
4758
                    gen_op_fmov_FT0_STN(1);
4759
                    gen_op_fucom_ST0_FT0();
4760
                    gen_op_fpop();
4761
                    gen_op_fpop();
4762
                    break;
4763
                default:
4764
                    goto illegal_op;
4765
                }
4766
                break;
4767
            case 0x1c:
4768
                switch(rm) {
4769
                case 0: /* feni (287 only, just do nop here) */
4770
                    break;
4771
                case 1: /* fdisi (287 only, just do nop here) */
4772
                    break;
4773
                case 2: /* fclex */
4774
                    gen_op_fclex();
4775
                    break;
4776
                case 3: /* fninit */
4777
                    gen_op_fninit();
4778
                    break;
4779
                case 4: /* fsetpm (287 only, just do nop here) */
4780
                    break;
4781
                default:
4782
                    goto illegal_op;
4783
                }
4784
                break;
4785
            case 0x1d: /* fucomi */
4786
                if (s->cc_op != CC_OP_DYNAMIC)
4787
                    gen_op_set_cc_op(s->cc_op);
4788
                gen_op_fmov_FT0_STN(opreg);
4789
                gen_op_fucomi_ST0_FT0();
4790
                s->cc_op = CC_OP_EFLAGS;
4791
                break;
4792
            case 0x1e: /* fcomi */
4793
                if (s->cc_op != CC_OP_DYNAMIC)
4794
                    gen_op_set_cc_op(s->cc_op);
4795
                gen_op_fmov_FT0_STN(opreg);
4796
                gen_op_fcomi_ST0_FT0();
4797
                s->cc_op = CC_OP_EFLAGS;
4798
                break;
4799
            case 0x28: /* ffree sti */
4800
                gen_op_ffree_STN(opreg);
4801
                break;
4802
            case 0x2a: /* fst sti */
4803
                gen_op_fmov_STN_ST0(opreg);
4804
                break;
4805
            case 0x2b: /* fstp sti */
4806
            case 0x0b: /* fstp1 sti, undocumented op */
4807
            case 0x3a: /* fstp8 sti, undocumented op */
4808
            case 0x3b: /* fstp9 sti, undocumented op */
4809
                gen_op_fmov_STN_ST0(opreg);
4810
                gen_op_fpop();
4811
                break;
4812
            case 0x2c: /* fucom st(i) */
4813
                gen_op_fmov_FT0_STN(opreg);
4814
                gen_op_fucom_ST0_FT0();
4815
                break;
4816
            case 0x2d: /* fucomp st(i) */
4817
                gen_op_fmov_FT0_STN(opreg);
4818
                gen_op_fucom_ST0_FT0();
4819
                gen_op_fpop();
4820
                break;
4821
            case 0x33: /* de/3 */
4822
                switch(rm) {
4823
                case 1: /* fcompp */
4824
                    gen_op_fmov_FT0_STN(1);
4825
                    gen_op_fcom_ST0_FT0();
4826
                    gen_op_fpop();
4827
                    gen_op_fpop();
4828
                    break;
4829
                default:
4830
                    goto illegal_op;
4831
                }
4832
                break;
4833
            case 0x38: /* ffreep sti, undocumented op */
4834
                gen_op_ffree_STN(opreg);
4835
                gen_op_fpop();
4836
                break;
4837
            case 0x3c: /* df/4 */
4838
                switch(rm) {
4839
                case 0:
4840
                    gen_op_fnstsw_EAX();
4841
                    break;
4842
                default:
4843
                    goto illegal_op;
4844
                }
4845
                break;
4846
            case 0x3d: /* fucomip */
4847
                if (s->cc_op != CC_OP_DYNAMIC)
4848
                    gen_op_set_cc_op(s->cc_op);
4849
                gen_op_fmov_FT0_STN(opreg);
4850
                gen_op_fucomi_ST0_FT0();
4851
                gen_op_fpop();
4852
                s->cc_op = CC_OP_EFLAGS;
4853
                break;
4854
            case 0x3e: /* fcomip */
4855
                if (s->cc_op != CC_OP_DYNAMIC)
4856
                    gen_op_set_cc_op(s->cc_op);
4857
                gen_op_fmov_FT0_STN(opreg);
4858
                gen_op_fcomi_ST0_FT0();
4859
                gen_op_fpop();
4860
                s->cc_op = CC_OP_EFLAGS;
4861
                break;
4862
            case 0x10 ... 0x13: /* fcmovxx */
4863
            case 0x18 ... 0x1b:
4864
                {
4865
                    int op1;
4866
                    const static uint8_t fcmov_cc[8] = {
4867
                        (JCC_B << 1),
4868
                        (JCC_Z << 1),
4869
                        (JCC_BE << 1),
4870
                        (JCC_P << 1),
4871
                    };
4872
                    op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
4873
                    gen_setcc(s, op1);
4874
                    gen_op_fcmov_ST0_STN_T0(opreg);
4875
                }
4876
                break;
4877
            default:
4878
                goto illegal_op;
4879
            }
4880
        }
4881
        break;
4882
        /************************/
4883
        /* string ops */
4884

    
4885
    case 0xa4: /* movsS */
4886
    case 0xa5:
4887
        if ((b & 1) == 0)
4888
            ot = OT_BYTE;
4889
        else
4890
            ot = dflag + OT_WORD;
4891

    
4892
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4893
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4894
        } else {
4895
            gen_movs(s, ot);
4896
        }
4897
        break;
4898

    
4899
    case 0xaa: /* stosS */
4900
    case 0xab:
4901
        if ((b & 1) == 0)
4902
            ot = OT_BYTE;
4903
        else
4904
            ot = dflag + OT_WORD;
4905

    
4906
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4907
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4908
        } else {
4909
            gen_stos(s, ot);
4910
        }
4911
        break;
4912
    case 0xac: /* lodsS */
4913
    case 0xad:
4914
        if ((b & 1) == 0)
4915
            ot = OT_BYTE;
4916
        else
4917
            ot = dflag + OT_WORD;
4918
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4919
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4920
        } else {
4921
            gen_lods(s, ot);
4922
        }
4923
        break;
4924
    case 0xae: /* scasS */
4925
    case 0xaf:
4926
        if ((b & 1) == 0)
4927
            ot = OT_BYTE;
4928
        else
4929
            ot = dflag + OT_WORD;
4930
        if (prefixes & PREFIX_REPNZ) {
4931
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4932
        } else if (prefixes & PREFIX_REPZ) {
4933
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4934
        } else {
4935
            gen_scas(s, ot);
4936
            s->cc_op = CC_OP_SUBB + ot;
4937
        }
4938
        break;
4939

    
4940
    case 0xa6: /* cmpsS */
4941
    case 0xa7:
4942
        if ((b & 1) == 0)
4943
            ot = OT_BYTE;
4944
        else
4945
            ot = dflag + OT_WORD;
4946
        if (prefixes & PREFIX_REPNZ) {
4947
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4948
        } else if (prefixes & PREFIX_REPZ) {
4949
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4950
        } else {
4951
            gen_cmps(s, ot);
4952
            s->cc_op = CC_OP_SUBB + ot;
4953
        }
4954
        break;
4955
    case 0x6c: /* insS */
4956
    case 0x6d:
4957
        if ((b & 1) == 0)
4958
            ot = OT_BYTE;
4959
        else
4960
            ot = dflag ? OT_LONG : OT_WORD;
4961
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
4962
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
4963
        gen_op_andl_T0_ffff();
4964
        if (gen_svm_check_io(s, pc_start,
4965
                             SVM_IOIO_TYPE_MASK | (1 << (4+ot)) |
4966
                             svm_is_rep(prefixes) | 4 | (1 << (7+s->aflag))))
4967
            break;
4968
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4969
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4970
        } else {
4971
            gen_ins(s, ot);
4972
        }
4973
        break;
4974
    case 0x6e: /* outsS */
4975
    case 0x6f:
4976
        if ((b & 1) == 0)
4977
            ot = OT_BYTE;
4978
        else
4979
            ot = dflag ? OT_LONG : OT_WORD;
4980
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
4981
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
4982
        gen_op_andl_T0_ffff();
4983
        if (gen_svm_check_io(s, pc_start,
4984
                             (1 << (4+ot)) | svm_is_rep(prefixes) |
4985
                             4 | (1 << (7+s->aflag))))
4986
            break;
4987
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4988
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4989
        } else {
4990
            gen_outs(s, ot);
4991
        }
4992
        break;
4993

    
4994
        /************************/
4995
        /* port I/O */
4996

    
4997
    case 0xe4:
4998
    case 0xe5:
4999
        if ((b & 1) == 0)
5000
            ot = OT_BYTE;
5001
        else
5002
            ot = dflag ? OT_LONG : OT_WORD;
5003
        val = ldub_code(s->pc++);
5004
        gen_op_movl_T0_im(val);
5005
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
5006
        if (gen_svm_check_io(s, pc_start,
5007
                             SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
5008
                             (1 << (4+ot))))
5009
            break;
5010
        gen_op_in[ot]();
5011
        gen_op_mov_reg_T1(ot, R_EAX);
5012
        break;
5013
    case 0xe6:
5014
    case 0xe7:
5015
        if ((b & 1) == 0)
5016
            ot = OT_BYTE;
5017
        else
5018
            ot = dflag ? OT_LONG : OT_WORD;
5019
        val = ldub_code(s->pc++);
5020
        gen_op_movl_T0_im(val);
5021
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
5022
        if (gen_svm_check_io(s, pc_start, svm_is_rep(prefixes) |
5023
                             (1 << (4+ot))))
5024
            break;
5025
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5026
        gen_op_out[ot]();
5027
        break;
5028
    case 0xec:
5029
    case 0xed:
5030
        if ((b & 1) == 0)
5031
            ot = OT_BYTE;
5032
        else
5033
            ot = dflag ? OT_LONG : OT_WORD;
5034
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5035
        gen_op_andl_T0_ffff();
5036
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
5037
        if (gen_svm_check_io(s, pc_start,
5038
                             SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
5039
                             (1 << (4+ot))))
5040
            break;
5041
        gen_op_in[ot]();
5042
        gen_op_mov_reg_T1(ot, R_EAX);
5043
        break;
5044
    case 0xee:
5045
    case 0xef:
5046
        if ((b & 1) == 0)
5047
            ot = OT_BYTE;
5048
        else
5049
            ot = dflag ? OT_LONG : OT_WORD;
5050
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5051
        gen_op_andl_T0_ffff();
5052
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
5053
        if (gen_svm_check_io(s, pc_start,
5054
                             svm_is_rep(prefixes) | (1 << (4+ot))))
5055
            break;
5056
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5057
        gen_op_out[ot]();
5058
        break;
5059

    
5060
        /************************/
5061
        /* control */
5062
    case 0xc2: /* ret im */
5063
        val = ldsw_code(s->pc);
5064
        s->pc += 2;
5065
        gen_pop_T0(s);
5066
        if (CODE64(s) && s->dflag)
5067
            s->dflag = 2;
5068
        gen_stack_update(s, val + (2 << s->dflag));
5069
        if (s->dflag == 0)
5070
            gen_op_andl_T0_ffff();
5071
        gen_op_jmp_T0();
5072
        gen_eob(s);
5073
        break;
5074
    case 0xc3: /* ret */
5075
        gen_pop_T0(s);
5076
        gen_pop_update(s);
5077
        if (s->dflag == 0)
5078
            gen_op_andl_T0_ffff();
5079
        gen_op_jmp_T0();
5080
        gen_eob(s);
5081
        break;
5082
    case 0xca: /* lret im */
5083
        val = ldsw_code(s->pc);
5084
        s->pc += 2;
5085
    do_lret:
5086
        if (s->pe && !s->vm86) {
5087
            if (s->cc_op != CC_OP_DYNAMIC)
5088
                gen_op_set_cc_op(s->cc_op);
5089
            gen_jmp_im(pc_start - s->cs_base);
5090
            gen_op_lret_protected(s->dflag, val);
5091
        } else {
5092
            gen_stack_A0(s);
5093
            /* pop offset */
5094
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5095
            if (s->dflag == 0)
5096
                gen_op_andl_T0_ffff();
5097
            /* NOTE: keeping EIP updated is not a problem in case of
5098
               exception */
5099
            gen_op_jmp_T0();
5100
            /* pop selector */
5101
            gen_op_addl_A0_im(2 << s->dflag);
5102
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5103
            gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5104
            /* add stack offset */
5105
            gen_stack_update(s, val + (4 << s->dflag));
5106
        }
5107
        gen_eob(s);
5108
        break;
5109
    case 0xcb: /* lret */
5110
        val = 0;
5111
        goto do_lret;
5112
    case 0xcf: /* iret */
5113
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5114
            break;
5115
        if (!s->pe) {
5116
            /* real mode */
5117
            gen_op_iret_real(s->dflag);
5118
            s->cc_op = CC_OP_EFLAGS;
5119
        } else if (s->vm86) {
5120
            if (s->iopl != 3) {
5121
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5122
            } else {
5123
                gen_op_iret_real(s->dflag);
5124
                s->cc_op = CC_OP_EFLAGS;
5125
            }
5126
        } else {
5127
            if (s->cc_op != CC_OP_DYNAMIC)
5128
                gen_op_set_cc_op(s->cc_op);
5129
            gen_jmp_im(pc_start - s->cs_base);
5130
            gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
5131
            s->cc_op = CC_OP_EFLAGS;
5132
        }
5133
        gen_eob(s);
5134
        break;
5135
    case 0xe8: /* call im */
5136
        {
5137
            if (dflag)
5138
                tval = (int32_t)insn_get(s, OT_LONG);
5139
            else
5140
                tval = (int16_t)insn_get(s, OT_WORD);
5141
            next_eip = s->pc - s->cs_base;
5142
            tval += next_eip;
5143
            if (s->dflag == 0)
5144
                tval &= 0xffff;
5145
            gen_movtl_T0_im(next_eip);
5146
            gen_push_T0(s);
5147
            gen_jmp(s, tval);
5148
        }
5149
        break;
5150
    case 0x9a: /* lcall im */
5151
        {
5152
            unsigned int selector, offset;
5153

    
5154
            if (CODE64(s))
5155
                goto illegal_op;
5156
            ot = dflag ? OT_LONG : OT_WORD;
5157
            offset = insn_get(s, ot);
5158
            selector = insn_get(s, OT_WORD);
5159

    
5160
            gen_op_movl_T0_im(selector);
5161
            gen_op_movl_T1_imu(offset);
5162
        }
5163
        goto do_lcall;
5164
    case 0xe9: /* jmp im */
5165
        if (dflag)
5166
            tval = (int32_t)insn_get(s, OT_LONG);
5167
        else
5168
            tval = (int16_t)insn_get(s, OT_WORD);
5169
        tval += s->pc - s->cs_base;
5170
        if (s->dflag == 0)
5171
            tval &= 0xffff;
5172
        gen_jmp(s, tval);
5173
        break;
5174
    case 0xea: /* ljmp im */
5175
        {
5176
            unsigned int selector, offset;
5177

    
5178
            if (CODE64(s))
5179
                goto illegal_op;
5180
            ot = dflag ? OT_LONG : OT_WORD;
5181
            offset = insn_get(s, ot);
5182
            selector = insn_get(s, OT_WORD);
5183

    
5184
            gen_op_movl_T0_im(selector);
5185
            gen_op_movl_T1_imu(offset);
5186
        }
5187
        goto do_ljmp;
5188
    case 0xeb: /* jmp Jb */
5189
        tval = (int8_t)insn_get(s, OT_BYTE);
5190
        tval += s->pc - s->cs_base;
5191
        if (s->dflag == 0)
5192
            tval &= 0xffff;
5193
        gen_jmp(s, tval);
5194
        break;
5195
    case 0x70 ... 0x7f: /* jcc Jb */
5196
        tval = (int8_t)insn_get(s, OT_BYTE);
5197
        goto do_jcc;
5198
    case 0x180 ... 0x18f: /* jcc Jv */
5199
        if (dflag) {
5200
            tval = (int32_t)insn_get(s, OT_LONG);
5201
        } else {
5202
            tval = (int16_t)insn_get(s, OT_WORD);
5203
        }
5204
    do_jcc:
5205
        next_eip = s->pc - s->cs_base;
5206
        tval += next_eip;
5207
        if (s->dflag == 0)
5208
            tval &= 0xffff;
5209
        gen_jcc(s, b, tval, next_eip);
5210
        break;
5211

    
5212
    case 0x190 ... 0x19f: /* setcc Gv */
5213
        modrm = ldub_code(s->pc++);
5214
        gen_setcc(s, b);
5215
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5216
        break;
5217
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
5218
        ot = dflag + OT_WORD;
5219
        modrm = ldub_code(s->pc++);
5220
        reg = ((modrm >> 3) & 7) | rex_r;
5221
        mod = (modrm >> 6) & 3;
5222
        gen_setcc(s, b);
5223
        if (mod != 3) {
5224
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5225
            gen_op_ld_T1_A0(ot + s->mem_index);
5226
        } else {
5227
            rm = (modrm & 7) | REX_B(s);
5228
            gen_op_mov_TN_reg(ot, 1, rm);
5229
        }
5230
        gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5231
        break;
5232

    
5233
        /************************/
5234
        /* flags */
5235
    case 0x9c: /* pushf */
5236
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5237
            break;
5238
        if (s->vm86 && s->iopl != 3) {
5239
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5240
        } else {
5241
            if (s->cc_op != CC_OP_DYNAMIC)
5242
                gen_op_set_cc_op(s->cc_op);
5243
            gen_op_movl_T0_eflags();
5244
            gen_push_T0(s);
5245
        }
5246
        break;
5247
    case 0x9d: /* popf */
5248
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5249
            break;
5250
        if (s->vm86 && s->iopl != 3) {
5251
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5252
        } else {
5253
            gen_pop_T0(s);
5254
            if (s->cpl == 0) {
5255
                if (s->dflag) {
5256
                    gen_op_movl_eflags_T0_cpl0();
5257
                } else {
5258
                    gen_op_movw_eflags_T0_cpl0();
5259
                }
5260
            } else {
5261
                if (s->cpl <= s->iopl) {
5262
                    if (s->dflag) {
5263
                        gen_op_movl_eflags_T0_io();
5264
                    } else {
5265
                        gen_op_movw_eflags_T0_io();
5266
                    }
5267
                } else {
5268
                    if (s->dflag) {
5269
                        gen_op_movl_eflags_T0();
5270
                    } else {
5271
                        gen_op_movw_eflags_T0();
5272
                    }
5273
                }
5274
            }
5275
            gen_pop_update(s);
5276
            s->cc_op = CC_OP_EFLAGS;
5277
            /* abort translation because TF flag may change */
5278
            gen_jmp_im(s->pc - s->cs_base);
5279
            gen_eob(s);
5280
        }
5281
        break;
5282
    case 0x9e: /* sahf */
5283
        if (CODE64(s))
5284
            goto illegal_op;
5285
        gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5286
        if (s->cc_op != CC_OP_DYNAMIC)
5287
            gen_op_set_cc_op(s->cc_op);
5288
        gen_op_movb_eflags_T0();
5289
        s->cc_op = CC_OP_EFLAGS;
5290
        break;
5291
    case 0x9f: /* lahf */
5292
        if (CODE64(s))
5293
            goto illegal_op;
5294
        if (s->cc_op != CC_OP_DYNAMIC)
5295
            gen_op_set_cc_op(s->cc_op);
5296
        gen_op_movl_T0_eflags();
5297
        gen_op_mov_reg_T0(OT_BYTE, R_AH);
5298
        break;
5299
    case 0xf5: /* cmc */
5300
        if (s->cc_op != CC_OP_DYNAMIC)
5301
            gen_op_set_cc_op(s->cc_op);
5302
        gen_op_cmc();
5303
        s->cc_op = CC_OP_EFLAGS;
5304
        break;
5305
    case 0xf8: /* clc */
5306
        if (s->cc_op != CC_OP_DYNAMIC)
5307
            gen_op_set_cc_op(s->cc_op);
5308
        gen_op_clc();
5309
        s->cc_op = CC_OP_EFLAGS;
5310
        break;
5311
    case 0xf9: /* stc */
5312
        if (s->cc_op != CC_OP_DYNAMIC)
5313
            gen_op_set_cc_op(s->cc_op);
5314
        gen_op_stc();
5315
        s->cc_op = CC_OP_EFLAGS;
5316
        break;
5317
    case 0xfc: /* cld */
5318
        gen_op_cld();
5319
        break;
5320
    case 0xfd: /* std */
5321
        gen_op_std();
5322
        break;
5323

    
5324
        /************************/
5325
        /* bit operations */
5326
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
5327
        ot = dflag + OT_WORD;
5328
        modrm = ldub_code(s->pc++);
5329
        op = (modrm >> 3) & 7;
5330
        mod = (modrm >> 6) & 3;
5331
        rm = (modrm & 7) | REX_B(s);
5332
        if (mod != 3) {
5333
            s->rip_offset = 1;
5334
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5335
            gen_op_ld_T0_A0(ot + s->mem_index);
5336
        } else {
5337
            gen_op_mov_TN_reg(ot, 0, rm);
5338
        }
5339
        /* load shift */
5340
        val = ldub_code(s->pc++);
5341
        gen_op_movl_T1_im(val);
5342
        if (op < 4)
5343
            goto illegal_op;
5344
        op -= 4;
5345
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5346
        s->cc_op = CC_OP_SARB + ot;
5347
        if (op != 0) {
5348
            if (mod != 3)
5349
                gen_op_st_T0_A0(ot + s->mem_index);
5350
            else
5351
                gen_op_mov_reg_T0(ot, rm);
5352
            gen_op_update_bt_cc();
5353
        }
5354
        break;
5355
    case 0x1a3: /* bt Gv, Ev */
5356
        op = 0;
5357
        goto do_btx;
5358
    case 0x1ab: /* bts */
5359
        op = 1;
5360
        goto do_btx;
5361
    case 0x1b3: /* btr */
5362
        op = 2;
5363
        goto do_btx;
5364
    case 0x1bb: /* btc */
5365
        op = 3;
5366
    do_btx:
5367
        ot = dflag + OT_WORD;
5368
        modrm = ldub_code(s->pc++);
5369
        reg = ((modrm >> 3) & 7) | rex_r;
5370
        mod = (modrm >> 6) & 3;
5371
        rm = (modrm & 7) | REX_B(s);
5372
        gen_op_mov_TN_reg(OT_LONG, 1, reg);
5373
        if (mod != 3) {
5374
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5375
            /* specific case: we need to add a displacement */
5376
            gen_op_add_bit_A0_T1[ot - OT_WORD]();
5377
            gen_op_ld_T0_A0(ot + s->mem_index);
5378
        } else {
5379
            gen_op_mov_TN_reg(ot, 0, rm);
5380
        }
5381
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5382
        s->cc_op = CC_OP_SARB + ot;
5383
        if (op != 0) {
5384
            if (mod != 3)
5385
                gen_op_st_T0_A0(ot + s->mem_index);
5386
            else
5387
                gen_op_mov_reg_T0(ot, rm);
5388
            gen_op_update_bt_cc();
5389
        }
5390
        break;
5391
    case 0x1bc: /* bsf */
5392
    case 0x1bd: /* bsr */
5393
        ot = dflag + OT_WORD;
5394
        modrm = ldub_code(s->pc++);
5395
        reg = ((modrm >> 3) & 7) | rex_r;
5396
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5397
        /* NOTE: in order to handle the 0 case, we must load the
5398
           result. It could be optimized with a generated jump */
5399
        gen_op_mov_TN_reg(ot, 1, reg);
5400
        gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5401
        gen_op_mov_reg_T1(ot, reg);
5402
        s->cc_op = CC_OP_LOGICB + ot;
5403
        break;
5404
        /************************/
5405
        /* bcd */
5406
    case 0x27: /* daa */
5407
        if (CODE64(s))
5408
            goto illegal_op;
5409
        if (s->cc_op != CC_OP_DYNAMIC)
5410
            gen_op_set_cc_op(s->cc_op);
5411
        gen_op_daa();
5412
        s->cc_op = CC_OP_EFLAGS;
5413
        break;
5414
    case 0x2f: /* das */
5415
        if (CODE64(s))
5416
            goto illegal_op;
5417
        if (s->cc_op != CC_OP_DYNAMIC)
5418
            gen_op_set_cc_op(s->cc_op);
5419
        gen_op_das();
5420
        s->cc_op = CC_OP_EFLAGS;
5421
        break;
5422
    case 0x37: /* aaa */
5423
        if (CODE64(s))
5424
            goto illegal_op;
5425
        if (s->cc_op != CC_OP_DYNAMIC)
5426
            gen_op_set_cc_op(s->cc_op);
5427
        gen_op_aaa();
5428
        s->cc_op = CC_OP_EFLAGS;
5429
        break;
5430
    case 0x3f: /* aas */
5431
        if (CODE64(s))
5432
            goto illegal_op;
5433
        if (s->cc_op != CC_OP_DYNAMIC)
5434
            gen_op_set_cc_op(s->cc_op);
5435
        gen_op_aas();
5436
        s->cc_op = CC_OP_EFLAGS;
5437
        break;
5438
    case 0xd4: /* aam */
5439
        if (CODE64(s))
5440
            goto illegal_op;
5441
        val = ldub_code(s->pc++);
5442
        if (val == 0) {
5443
            gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5444
        } else {
5445
            gen_op_aam(val);
5446
            s->cc_op = CC_OP_LOGICB;
5447
        }
5448
        break;
5449
    case 0xd5: /* aad */
5450
        if (CODE64(s))
5451
            goto illegal_op;
5452
        val = ldub_code(s->pc++);
5453
        gen_op_aad(val);
5454
        s->cc_op = CC_OP_LOGICB;
5455
        break;
5456
        /************************/
5457
        /* misc */
5458
    case 0x90: /* nop */
5459
        /* XXX: xchg + rex handling */
5460
        /* XXX: correct lock test for all insn */
5461
        if (prefixes & PREFIX_LOCK)
5462
            goto illegal_op;
5463
        if (prefixes & PREFIX_REPZ) {
5464
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
5465
        }
5466
        break;
5467
    case 0x9b: /* fwait */
5468
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5469
            (HF_MP_MASK | HF_TS_MASK)) {
5470
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5471
        } else {
5472
            if (s->cc_op != CC_OP_DYNAMIC)
5473
                gen_op_set_cc_op(s->cc_op);
5474
            gen_jmp_im(pc_start - s->cs_base);
5475
            gen_op_fwait();
5476
        }
5477
        break;
5478
    case 0xcc: /* int3 */
5479
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5480
            break;
5481
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5482
        break;
5483
    case 0xcd: /* int N */
5484
        val = ldub_code(s->pc++);
5485
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5486
            break;
5487
        if (s->vm86 && s->iopl != 3) {
5488
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5489
        } else {
5490
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5491
        }
5492
        break;
5493
    case 0xce: /* into */
5494
        if (CODE64(s))
5495
            goto illegal_op;
5496
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5497
            break;
5498
        if (s->cc_op != CC_OP_DYNAMIC)
5499
            gen_op_set_cc_op(s->cc_op);
5500
        gen_jmp_im(pc_start - s->cs_base);
5501
        gen_op_into(s->pc - pc_start);
5502
        break;
5503
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
5504
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
5505
            break;
5506
#if 1
5507
        gen_debug(s, pc_start - s->cs_base);
5508
#else
5509
        /* start debug */
5510
        tb_flush(cpu_single_env);
5511
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5512
#endif
5513
        break;
5514
    case 0xfa: /* cli */
5515
        if (!s->vm86) {
5516
            if (s->cpl <= s->iopl) {
5517
                gen_op_cli();
5518
            } else {
5519
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5520
            }
5521
        } else {
5522
            if (s->iopl == 3) {
5523
                gen_op_cli();
5524
            } else {
5525
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5526
            }
5527
        }
5528
        break;
5529
    case 0xfb: /* sti */
5530
        if (!s->vm86) {
5531
            if (s->cpl <= s->iopl) {
5532
            gen_sti:
5533
                gen_op_sti();
5534
                /* interruptions are enabled only the first insn after sti */
5535
                /* If several instructions disable interrupts, only the
5536
                   _first_ does it */
5537
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5538
                    gen_op_set_inhibit_irq();
5539
                /* give a chance to handle pending irqs */
5540
                gen_jmp_im(s->pc - s->cs_base);
5541
                gen_eob(s);
5542
            } else {
5543
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5544
            }
5545
        } else {
5546
            if (s->iopl == 3) {
5547
                goto gen_sti;
5548
            } else {
5549
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5550
            }
5551
        }
5552
        break;
5553
    case 0x62: /* bound */
5554
        if (CODE64(s))
5555
            goto illegal_op;
5556
        ot = dflag ? OT_LONG : OT_WORD;
5557
        modrm = ldub_code(s->pc++);
5558
        reg = (modrm >> 3) & 7;
5559
        mod = (modrm >> 6) & 3;
5560
        if (mod == 3)
5561
            goto illegal_op;
5562
        gen_op_mov_TN_reg(ot, 0, reg);
5563
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5564
        gen_jmp_im(pc_start - s->cs_base);
5565
        if (ot == OT_WORD)
5566
            gen_op_boundw();
5567
        else
5568
            gen_op_boundl();
5569
        break;
5570
    case 0x1c8 ... 0x1cf: /* bswap reg */
5571
        reg = (b & 7) | REX_B(s);
5572
#ifdef TARGET_X86_64
5573
        if (dflag == 2) {
5574
            gen_op_mov_TN_reg(OT_QUAD, 0, reg);
5575
            tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
5576
            gen_op_mov_reg_T0(OT_QUAD, reg);
5577
        } else
5578
        {
5579
            TCGv tmp0;
5580
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
5581
            
5582
            tmp0 = tcg_temp_new(TCG_TYPE_I32);
5583
            tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
5584
            tcg_gen_bswap_i32(tmp0, tmp0);
5585
            tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
5586
            gen_op_mov_reg_T0(OT_LONG, reg);
5587
        }
5588
#else
5589
        {
5590
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
5591
            tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
5592
            gen_op_mov_reg_T0(OT_LONG, reg);
5593
        }
5594
#endif
5595
        break;
5596
    case 0xd6: /* salc */
5597
        if (CODE64(s))
5598
            goto illegal_op;
5599
        if (s->cc_op != CC_OP_DYNAMIC)
5600
            gen_op_set_cc_op(s->cc_op);
5601
        gen_op_salc();
5602
        break;
5603
    case 0xe0: /* loopnz */
5604
    case 0xe1: /* loopz */
5605
        if (s->cc_op != CC_OP_DYNAMIC)
5606
            gen_op_set_cc_op(s->cc_op);
5607
        /* FALL THRU */
5608
    case 0xe2: /* loop */
5609
    case 0xe3: /* jecxz */
5610
        {
5611
            int l1, l2;
5612

    
5613
            tval = (int8_t)insn_get(s, OT_BYTE);
5614
            next_eip = s->pc - s->cs_base;
5615
            tval += next_eip;
5616
            if (s->dflag == 0)
5617
                tval &= 0xffff;
5618

    
5619
            l1 = gen_new_label();
5620
            l2 = gen_new_label();
5621
            b &= 3;
5622
            if (b == 3) {
5623
                gen_op_jz_ecx[s->aflag](l1);
5624
            } else {
5625
                gen_op_dec_ECX[s->aflag]();
5626
                if (b <= 1)
5627
                    gen_op_mov_T0_cc();
5628
                gen_op_loop[s->aflag][b](l1);
5629
            }
5630

    
5631
            gen_jmp_im(next_eip);
5632
            gen_op_jmp_label(l2);
5633
            gen_set_label(l1);
5634
            gen_jmp_im(tval);
5635
            gen_set_label(l2);
5636
            gen_eob(s);
5637
        }
5638
        break;
5639
    case 0x130: /* wrmsr */
5640
    case 0x132: /* rdmsr */
5641
        if (s->cpl != 0) {
5642
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5643
        } else {
5644
            int retval = 0;
5645
            if (b & 2) {
5646
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
5647
                gen_op_rdmsr();
5648
            } else {
5649
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
5650
                gen_op_wrmsr();
5651
            }
5652
            if(retval)
5653
                gen_eob(s);
5654
        }
5655
        break;
5656
    case 0x131: /* rdtsc */
5657
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
5658
            break;
5659
        gen_jmp_im(pc_start - s->cs_base);
5660
        gen_op_rdtsc();
5661
        break;
5662
    case 0x133: /* rdpmc */
5663
        gen_jmp_im(pc_start - s->cs_base);
5664
        gen_op_rdpmc();
5665
        break;
5666
    case 0x134: /* sysenter */
5667
        if (CODE64(s))
5668
            goto illegal_op;
5669
        if (!s->pe) {
5670
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5671
        } else {
5672
            if (s->cc_op != CC_OP_DYNAMIC) {
5673
                gen_op_set_cc_op(s->cc_op);
5674
                s->cc_op = CC_OP_DYNAMIC;
5675
            }
5676
            gen_jmp_im(pc_start - s->cs_base);
5677
            gen_op_sysenter();
5678
            gen_eob(s);
5679
        }
5680
        break;
5681
    case 0x135: /* sysexit */
5682
        if (CODE64(s))
5683
            goto illegal_op;
5684
        if (!s->pe) {
5685
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5686
        } else {
5687
            if (s->cc_op != CC_OP_DYNAMIC) {
5688
                gen_op_set_cc_op(s->cc_op);
5689
                s->cc_op = CC_OP_DYNAMIC;
5690
            }
5691
            gen_jmp_im(pc_start - s->cs_base);
5692
            gen_op_sysexit();
5693
            gen_eob(s);
5694
        }
5695
        break;
5696
#ifdef TARGET_X86_64
5697
    case 0x105: /* syscall */
5698
        /* XXX: is it usable in real mode ? */
5699
        if (s->cc_op != CC_OP_DYNAMIC) {
5700
            gen_op_set_cc_op(s->cc_op);
5701
            s->cc_op = CC_OP_DYNAMIC;
5702
        }
5703
        gen_jmp_im(pc_start - s->cs_base);
5704
        gen_op_syscall(s->pc - pc_start);
5705
        gen_eob(s);
5706
        break;
5707
    case 0x107: /* sysret */
5708
        if (!s->pe) {
5709
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5710
        } else {
5711
            if (s->cc_op != CC_OP_DYNAMIC) {
5712
                gen_op_set_cc_op(s->cc_op);
5713
                s->cc_op = CC_OP_DYNAMIC;
5714
            }
5715
            gen_jmp_im(pc_start - s->cs_base);
5716
            gen_op_sysret(s->dflag);
5717
            /* condition codes are modified only in long mode */
5718
            if (s->lma)
5719
                s->cc_op = CC_OP_EFLAGS;
5720
            gen_eob(s);
5721
        }
5722
        break;
5723
#endif
5724
    case 0x1a2: /* cpuid */
5725
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
5726
            break;
5727
        gen_op_cpuid();
5728
        break;
5729
    case 0xf4: /* hlt */
5730
        if (s->cpl != 0) {
5731
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5732
        } else {
5733
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
5734
                break;
5735
            if (s->cc_op != CC_OP_DYNAMIC)
5736
                gen_op_set_cc_op(s->cc_op);
5737
            gen_jmp_im(s->pc - s->cs_base);
5738
            gen_op_hlt();
5739
            s->is_jmp = 3;
5740
        }
5741
        break;
5742
    case 0x100:
5743
        modrm = ldub_code(s->pc++);
5744
        mod = (modrm >> 6) & 3;
5745
        op = (modrm >> 3) & 7;
5746
        switch(op) {
5747
        case 0: /* sldt */
5748
            if (!s->pe || s->vm86)
5749
                goto illegal_op;
5750
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
5751
                break;
5752
            gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5753
            ot = OT_WORD;
5754
            if (mod == 3)
5755
                ot += s->dflag;
5756
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5757
            break;
5758
        case 2: /* lldt */
5759
            if (!s->pe || s->vm86)
5760
                goto illegal_op;
5761
            if (s->cpl != 0) {
5762
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5763
            } else {
5764
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
5765
                    break;
5766
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5767
                gen_jmp_im(pc_start - s->cs_base);
5768
                gen_op_lldt_T0();
5769
            }
5770
            break;
5771
        case 1: /* str */
5772
            if (!s->pe || s->vm86)
5773
                goto illegal_op;
5774
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
5775
                break;
5776
            gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5777
            ot = OT_WORD;
5778
            if (mod == 3)
5779
                ot += s->dflag;
5780
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5781
            break;
5782
        case 3: /* ltr */
5783
            if (!s->pe || s->vm86)
5784
                goto illegal_op;
5785
            if (s->cpl != 0) {
5786
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5787
            } else {
5788
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
5789
                    break;
5790
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5791
                gen_jmp_im(pc_start - s->cs_base);
5792
                gen_op_ltr_T0();
5793
            }
5794
            break;
5795
        case 4: /* verr */
5796
        case 5: /* verw */
5797
            if (!s->pe || s->vm86)
5798
                goto illegal_op;
5799
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5800
            if (s->cc_op != CC_OP_DYNAMIC)
5801
                gen_op_set_cc_op(s->cc_op);
5802
            if (op == 4)
5803
                gen_op_verr();
5804
            else
5805
                gen_op_verw();
5806
            s->cc_op = CC_OP_EFLAGS;
5807
            break;
5808
        default:
5809
            goto illegal_op;
5810
        }
5811
        break;
5812
    case 0x101:
5813
        modrm = ldub_code(s->pc++);
5814
        mod = (modrm >> 6) & 3;
5815
        op = (modrm >> 3) & 7;
5816
        rm = modrm & 7;
5817
        switch(op) {
5818
        case 0: /* sgdt */
5819
            if (mod == 3)
5820
                goto illegal_op;
5821
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
5822
                break;
5823
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5824
            gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5825
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
5826
            gen_add_A0_im(s, 2);
5827
            gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5828
            if (!s->dflag)
5829
                gen_op_andl_T0_im(0xffffff);
5830
            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5831
            break;
5832
        case 1:
5833
            if (mod == 3) {
5834
                switch (rm) {
5835
                case 0: /* monitor */
5836
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5837
                        s->cpl != 0)
5838
                        goto illegal_op;
5839
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
5840
                        break;
5841
                    gen_jmp_im(pc_start - s->cs_base);
5842
#ifdef TARGET_X86_64
5843
                    if (s->aflag == 2) {
5844
                        gen_op_movq_A0_reg(R_EBX);
5845
                        gen_op_addq_A0_AL();
5846
                    } else
5847
#endif
5848
                    {
5849
                        gen_op_movl_A0_reg(R_EBX);
5850
                        gen_op_addl_A0_AL();
5851
                        if (s->aflag == 0)
5852
                            gen_op_andl_A0_ffff();
5853
                    }
5854
                    gen_add_A0_ds_seg(s);
5855
                    gen_op_monitor();
5856
                    break;
5857
                case 1: /* mwait */
5858
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5859
                        s->cpl != 0)
5860
                        goto illegal_op;
5861
                    if (s->cc_op != CC_OP_DYNAMIC) {
5862
                        gen_op_set_cc_op(s->cc_op);
5863
                        s->cc_op = CC_OP_DYNAMIC;
5864
                    }
5865
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
5866
                        break;
5867
                    gen_jmp_im(s->pc - s->cs_base);
5868
                    gen_op_mwait();
5869
                    gen_eob(s);
5870
                    break;
5871
                default:
5872
                    goto illegal_op;
5873
                }
5874
            } else { /* sidt */
5875
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
5876
                    break;
5877
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5878
                gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
5879
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
5880
                gen_add_A0_im(s, 2);
5881
                gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
5882
                if (!s->dflag)
5883
                    gen_op_andl_T0_im(0xffffff);
5884
                gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5885
            }
5886
            break;
5887
        case 2: /* lgdt */
5888
        case 3: /* lidt */
5889
            if (mod == 3) {
5890
                switch(rm) {
5891
                case 0: /* VMRUN */
5892
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
5893
                        break;
5894
                    if (s->cc_op != CC_OP_DYNAMIC)
5895
                        gen_op_set_cc_op(s->cc_op);
5896
                    gen_jmp_im(s->pc - s->cs_base);
5897
                    gen_op_vmrun();
5898
                    s->cc_op = CC_OP_EFLAGS;
5899
                    gen_eob(s);
5900
                    break;
5901
                case 1: /* VMMCALL */
5902
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
5903
                         break;
5904
                    /* FIXME: cause #UD if hflags & SVM */
5905
                    gen_op_vmmcall();
5906
                    break;
5907
                case 2: /* VMLOAD */
5908
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
5909
                         break;
5910
                    gen_op_vmload();
5911
                    break;
5912
                case 3: /* VMSAVE */
5913
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
5914
                         break;
5915
                    gen_op_vmsave();
5916
                    break;
5917
                case 4: /* STGI */
5918
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
5919
                         break;
5920
                    gen_op_stgi();
5921
                    break;
5922
                case 5: /* CLGI */
5923
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
5924
                         break;
5925
                    gen_op_clgi();
5926
                    break;
5927
                case 6: /* SKINIT */
5928
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
5929
                         break;
5930
                    gen_op_skinit();
5931
                    break;
5932
                case 7: /* INVLPGA */
5933
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
5934
                         break;
5935
                    gen_op_invlpga();
5936
                    break;
5937
                default:
5938
                    goto illegal_op;
5939
                }
5940
            } else if (s->cpl != 0) {
5941
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5942
            } else {
5943
                if (gen_svm_check_intercept(s, pc_start,
5944
                                            op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
5945
                    break;
5946
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5947
                gen_op_ld_T1_A0(OT_WORD + s->mem_index);
5948
                gen_add_A0_im(s, 2);
5949
                gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5950
                if (!s->dflag)
5951
                    gen_op_andl_T0_im(0xffffff);
5952
                if (op == 2) {
5953
                    gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
5954
                    gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
5955
                } else {
5956
                    gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
5957
                    gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
5958
                }
5959
            }
5960
            break;
5961
        case 4: /* smsw */
5962
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
5963
                break;
5964
            gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
5965
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
5966
            break;
5967
        case 6: /* lmsw */
5968
            if (s->cpl != 0) {
5969
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5970
            } else {
5971
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
5972
                    break;
5973
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5974
                gen_op_lmsw_T0();
5975
                gen_jmp_im(s->pc - s->cs_base);
5976
                gen_eob(s);
5977
            }
5978
            break;
5979
        case 7: /* invlpg */
5980
            if (s->cpl != 0) {
5981
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5982
            } else {
5983
                if (mod == 3) {
5984
#ifdef TARGET_X86_64
5985
                    if (CODE64(s) && rm == 0) {
5986
                        /* swapgs */
5987
                        gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
5988
                        gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
5989
                        gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
5990
                        gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
5991
                    } else
5992
#endif
5993
                    {
5994
                        goto illegal_op;
5995
                    }
5996
                } else {
5997
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
5998
                        break;
5999
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6000
                    gen_op_invlpg_A0();
6001
                    gen_jmp_im(s->pc - s->cs_base);
6002
                    gen_eob(s);
6003
                }
6004
            }
6005
            break;
6006
        default:
6007
            goto illegal_op;
6008
        }
6009
        break;
6010
    case 0x108: /* invd */
6011
    case 0x109: /* wbinvd */
6012
        if (s->cpl != 0) {
6013
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6014
        } else {
6015
            if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6016
                break;
6017
            /* nothing to do */
6018
        }
6019
        break;
6020
    case 0x63: /* arpl or movslS (x86_64) */
6021
#ifdef TARGET_X86_64
6022
        if (CODE64(s)) {
6023
            int d_ot;
6024
            /* d_ot is the size of destination */
6025
            d_ot = dflag + OT_WORD;
6026

    
6027
            modrm = ldub_code(s->pc++);
6028
            reg = ((modrm >> 3) & 7) | rex_r;
6029
            mod = (modrm >> 6) & 3;
6030
            rm = (modrm & 7) | REX_B(s);
6031

    
6032
            if (mod == 3) {
6033
                gen_op_mov_TN_reg(OT_LONG, 0, rm);
6034
                /* sign extend */
6035
                if (d_ot == OT_QUAD)
6036
                    gen_op_movslq_T0_T0();
6037
                gen_op_mov_reg_T0(d_ot, reg);
6038
            } else {
6039
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6040
                if (d_ot == OT_QUAD) {
6041
                    gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6042
                } else {
6043
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6044
                }
6045
                gen_op_mov_reg_T0(d_ot, reg);
6046
            }
6047
        } else
6048
#endif
6049
        {
6050
            if (!s->pe || s->vm86)
6051
                goto illegal_op;
6052
            ot = dflag ? OT_LONG : OT_WORD;
6053
            modrm = ldub_code(s->pc++);
6054
            reg = (modrm >> 3) & 7;
6055
            mod = (modrm >> 6) & 3;
6056
            rm = modrm & 7;
6057
            if (mod != 3) {
6058
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6059
                gen_op_ld_T0_A0(ot + s->mem_index);
6060
            } else {
6061
                gen_op_mov_TN_reg(ot, 0, rm);
6062
            }
6063
            if (s->cc_op != CC_OP_DYNAMIC)
6064
                gen_op_set_cc_op(s->cc_op);
6065
            gen_op_arpl();
6066
            s->cc_op = CC_OP_EFLAGS;
6067
            if (mod != 3) {
6068
                gen_op_st_T0_A0(ot + s->mem_index);
6069
            } else {
6070
                gen_op_mov_reg_T0(ot, rm);
6071
            }
6072
            gen_op_arpl_update();
6073
        }
6074
        break;
6075
    case 0x102: /* lar */
6076
    case 0x103: /* lsl */
6077
        if (!s->pe || s->vm86)
6078
            goto illegal_op;
6079
        ot = dflag ? OT_LONG : OT_WORD;
6080
        modrm = ldub_code(s->pc++);
6081
        reg = ((modrm >> 3) & 7) | rex_r;
6082
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6083
        gen_op_mov_TN_reg(ot, 1, reg);
6084
        if (s->cc_op != CC_OP_DYNAMIC)
6085
            gen_op_set_cc_op(s->cc_op);
6086
        if (b == 0x102)
6087
            gen_op_lar();
6088
        else
6089
            gen_op_lsl();
6090
        s->cc_op = CC_OP_EFLAGS;
6091
        gen_op_mov_reg_T1(ot, reg);
6092
        break;
6093
    case 0x118:
6094
        modrm = ldub_code(s->pc++);
6095
        mod = (modrm >> 6) & 3;
6096
        op = (modrm >> 3) & 7;
6097
        switch(op) {
6098
        case 0: /* prefetchnta */
6099
        case 1: /* prefetchnt0 */
6100
        case 2: /* prefetchnt0 */
6101
        case 3: /* prefetchnt0 */
6102
            if (mod == 3)
6103
                goto illegal_op;
6104
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6105
            /* nothing more to do */
6106
            break;
6107
        default: /* nop (multi byte) */
6108
            gen_nop_modrm(s, modrm);
6109
            break;
6110
        }
6111
        break;
6112
    case 0x119 ... 0x11f: /* nop (multi byte) */
6113
        modrm = ldub_code(s->pc++);
6114
        gen_nop_modrm(s, modrm);
6115
        break;
6116
    case 0x120: /* mov reg, crN */
6117
    case 0x122: /* mov crN, reg */
6118
        if (s->cpl != 0) {
6119
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6120
        } else {
6121
            modrm = ldub_code(s->pc++);
6122
            if ((modrm & 0xc0) != 0xc0)
6123
                goto illegal_op;
6124
            rm = (modrm & 7) | REX_B(s);
6125
            reg = ((modrm >> 3) & 7) | rex_r;
6126
            if (CODE64(s))
6127
                ot = OT_QUAD;
6128
            else
6129
                ot = OT_LONG;
6130
            switch(reg) {
6131
            case 0:
6132
            case 2:
6133
            case 3:
6134
            case 4:
6135
            case 8:
6136
                if (b & 2) {
6137
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6138
                    gen_op_mov_TN_reg(ot, 0, rm);
6139
                    gen_op_movl_crN_T0(reg);
6140
                    gen_jmp_im(s->pc - s->cs_base);
6141
                    gen_eob(s);
6142
                } else {
6143
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6144
#if !defined(CONFIG_USER_ONLY)
6145
                    if (reg == 8)
6146
                        gen_op_movtl_T0_cr8();
6147
                    else
6148
#endif
6149
                        gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6150
                    gen_op_mov_reg_T0(ot, rm);
6151
                }
6152
                break;
6153
            default:
6154
                goto illegal_op;
6155
            }
6156
        }
6157
        break;
6158
    case 0x121: /* mov reg, drN */
6159
    case 0x123: /* mov drN, reg */
6160
        if (s->cpl != 0) {
6161
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6162
        } else {
6163
            modrm = ldub_code(s->pc++);
6164
            if ((modrm & 0xc0) != 0xc0)
6165
                goto illegal_op;
6166
            rm = (modrm & 7) | REX_B(s);
6167
            reg = ((modrm >> 3) & 7) | rex_r;
6168
            if (CODE64(s))
6169
                ot = OT_QUAD;
6170
            else
6171
                ot = OT_LONG;
6172
            /* XXX: do it dynamically with CR4.DE bit */
6173
            if (reg == 4 || reg == 5 || reg >= 8)
6174
                goto illegal_op;
6175
            if (b & 2) {
6176
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6177
                gen_op_mov_TN_reg(ot, 0, rm);
6178
                gen_op_movl_drN_T0(reg);
6179
                gen_jmp_im(s->pc - s->cs_base);
6180
                gen_eob(s);
6181
            } else {
6182
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6183
                gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6184
                gen_op_mov_reg_T0(ot, rm);
6185
            }
6186
        }
6187
        break;
6188
    case 0x106: /* clts */
6189
        if (s->cpl != 0) {
6190
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6191
        } else {
6192
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6193
            gen_op_clts();
6194
            /* abort block because static cpu state changed */
6195
            gen_jmp_im(s->pc - s->cs_base);
6196
            gen_eob(s);
6197
        }
6198
        break;
6199
    /* MMX/3DNow!/SSE/SSE2/SSE3 support */
6200
    case 0x1c3: /* MOVNTI reg, mem */
6201
        if (!(s->cpuid_features & CPUID_SSE2))
6202
            goto illegal_op;
6203
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6204
        modrm = ldub_code(s->pc++);
6205
        mod = (modrm >> 6) & 3;
6206
        if (mod == 3)
6207
            goto illegal_op;
6208
        reg = ((modrm >> 3) & 7) | rex_r;
6209
        /* generate a generic store */
6210
        gen_ldst_modrm(s, modrm, ot, reg, 1);
6211
        break;
6212
    case 0x1ae:
6213
        modrm = ldub_code(s->pc++);
6214
        mod = (modrm >> 6) & 3;
6215
        op = (modrm >> 3) & 7;
6216
        switch(op) {
6217
        case 0: /* fxsave */
6218
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6219
                (s->flags & HF_EM_MASK))
6220
                goto illegal_op;
6221
            if (s->flags & HF_TS_MASK) {
6222
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6223
                break;
6224
            }
6225
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6226
            gen_op_fxsave_A0((s->dflag == 2));
6227
            break;
6228
        case 1: /* fxrstor */
6229
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6230
                (s->flags & HF_EM_MASK))
6231
                goto illegal_op;
6232
            if (s->flags & HF_TS_MASK) {
6233
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6234
                break;
6235
            }
6236
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6237
            gen_op_fxrstor_A0((s->dflag == 2));
6238
            break;
6239
        case 2: /* ldmxcsr */
6240
        case 3: /* stmxcsr */
6241
            if (s->flags & HF_TS_MASK) {
6242
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6243
                break;
6244
            }
6245
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6246
                mod == 3)
6247
                goto illegal_op;
6248
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6249
            if (op == 2) {
6250
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6251
                gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6252
            } else {
6253
                gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6254
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
6255
            }
6256
            break;
6257
        case 5: /* lfence */
6258
        case 6: /* mfence */
6259
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6260
                goto illegal_op;
6261
            break;
6262
        case 7: /* sfence / clflush */
6263
            if ((modrm & 0xc7) == 0xc0) {
6264
                /* sfence */
6265
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
6266
                if (!(s->cpuid_features & CPUID_SSE))
6267
                    goto illegal_op;
6268
            } else {
6269
                /* clflush */
6270
                if (!(s->cpuid_features & CPUID_CLFLUSH))
6271
                    goto illegal_op;
6272
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6273
            }
6274
            break;
6275
        default:
6276
            goto illegal_op;
6277
        }
6278
        break;
6279
    case 0x10d: /* 3DNow! prefetch(w) */
6280
        modrm = ldub_code(s->pc++);
6281
        mod = (modrm >> 6) & 3;
6282
        if (mod == 3)
6283
            goto illegal_op;
6284
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6285
        /* ignore for now */
6286
        break;
6287
    case 0x1aa: /* rsm */
6288
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
6289
            break;
6290
        if (!(s->flags & HF_SMM_MASK))
6291
            goto illegal_op;
6292
        if (s->cc_op != CC_OP_DYNAMIC) {
6293
            gen_op_set_cc_op(s->cc_op);
6294
            s->cc_op = CC_OP_DYNAMIC;
6295
        }
6296
        gen_jmp_im(s->pc - s->cs_base);
6297
        gen_op_rsm();
6298
        gen_eob(s);
6299
        break;
6300
    case 0x10e ... 0x10f:
6301
        /* 3DNow! instructions, ignore prefixes */
6302
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
6303
    case 0x110 ... 0x117:
6304
    case 0x128 ... 0x12f:
6305
    case 0x150 ... 0x177:
6306
    case 0x17c ... 0x17f:
6307
    case 0x1c2:
6308
    case 0x1c4 ... 0x1c6:
6309
    case 0x1d0 ... 0x1fe:
6310
        gen_sse(s, b, pc_start, rex_r);
6311
        break;
6312
    default:
6313
        goto illegal_op;
6314
    }
6315
    /* lock generation */
6316
    if (s->prefix & PREFIX_LOCK)
6317
        gen_op_unlock();
6318
    return s->pc;
6319
 illegal_op:
6320
    if (s->prefix & PREFIX_LOCK)
6321
        gen_op_unlock();
6322
    /* XXX: ensure that no lock was generated */
6323
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6324
    return s->pc;
6325
}
6326

    
6327
/* convenience masks over the x86 arithmetic status flags:
   all six (O,S,Z,A,P,C), and the same set minus the carry flag */
#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
6329

    
6330
/* flags read by an operation, indexed by micro-op code.  Consumed by
   optimize_flags() when walking a translation block backwards to
   compute flag liveness.  Ops not listed read no flags (value 0). */
static uint16_t opc_read_flags[NB_OPS] = {
    [INDEX_op_aas] = CC_A,
    [INDEX_op_aaa] = CC_A,
    [INDEX_op_das] = CC_A | CC_C,
    [INDEX_op_daa] = CC_A | CC_C,

    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_C,

    [INDEX_op_into] = CC_O,

    /* conditional jumps on the result of a subtract, per operand size */
    [INDEX_op_jb_subb] = CC_C,
    [INDEX_op_jb_subw] = CC_C,
    [INDEX_op_jb_subl] = CC_C,

    [INDEX_op_jz_subb] = CC_Z,
    [INDEX_op_jz_subw] = CC_Z,
    [INDEX_op_jz_subl] = CC_Z,

    [INDEX_op_jbe_subb] = CC_Z | CC_C,
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
    [INDEX_op_jbe_subl] = CC_Z | CC_C,

    [INDEX_op_js_subb] = CC_S,
    [INDEX_op_js_subw] = CC_S,
    [INDEX_op_js_subl] = CC_S,

    [INDEX_op_jl_subb] = CC_O | CC_S,
    [INDEX_op_jl_subw] = CC_O | CC_S,
    [INDEX_op_jl_subl] = CC_O | CC_S,

    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzw] = CC_Z,
    [INDEX_op_loopnzl] = CC_Z,
    [INDEX_op_loopzw] = CC_Z,
    [INDEX_op_loopzl] = CC_Z,

    /* setcc variants reading the dynamically-computed flags */
    [INDEX_op_seto_T0_cc] = CC_O,
    [INDEX_op_setb_T0_cc] = CC_C,
    [INDEX_op_setz_T0_cc] = CC_Z,
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
    [INDEX_op_sets_T0_cc] = CC_S,
    [INDEX_op_setp_T0_cc] = CC_P,
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

    /* setcc variants on the result of a subtract */
    [INDEX_op_setb_T0_subb] = CC_C,
    [INDEX_op_setb_T0_subw] = CC_C,
    [INDEX_op_setb_T0_subl] = CC_C,

    [INDEX_op_setz_T0_subb] = CC_Z,
    [INDEX_op_setz_T0_subw] = CC_Z,
    [INDEX_op_setz_T0_subl] = CC_Z,

    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

    [INDEX_op_sets_T0_subb] = CC_S,
    [INDEX_op_sets_T0_subw] = CC_S,
    [INDEX_op_sets_T0_subl] = CC_S,

    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,

    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
    [INDEX_op_cmc] = CC_C,
    [INDEX_op_salc] = CC_C,

    /* needed for correct flag optimisation before string ops */
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
    [INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
    [INDEX_op_jb_subq] = CC_C,
    [INDEX_op_jz_subq] = CC_Z,
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
    [INDEX_op_js_subq] = CC_S,
    [INDEX_op_jl_subq] = CC_O | CC_S,
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzq] = CC_Z,
    [INDEX_op_loopzq] = CC_Z,

    [INDEX_op_setb_T0_subq] = CC_C,
    [INDEX_op_setz_T0_subq] = CC_Z,
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
    [INDEX_op_sets_T0_subq] = CC_S,
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
#endif

/* adc/sbb and rotate-through-carry consume the incoming carry flag,
   for every memory-access suffix variant */
#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)

    DEF_READF( )
    DEF_READF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_READF(_kernel)
    DEF_READF(_user)
#endif
};
6462

    
6463
/* flags written by an operation, indexed by micro-op code.  If none of
   the flags an op writes is live afterwards, optimize_flags() may
   replace the op via opc_simpler[].  Ops not listed write no flags. */
static uint16_t opc_write_flags[NB_OPS] = {
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    /* sse */
    [INDEX_op_ucomiss] = CC_OSZAPC,
    [INDEX_op_ucomisd] = CC_OSZAPC,
    [INDEX_op_comiss] = CC_OSZAPC,
    [INDEX_op_comisd] = CC_OSZAPC,

    /* bcd */
    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    /* bit test / set / reset / complement */
    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    /* bit scan */
    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_verr] = CC_Z,
    [INDEX_op_verw] = CC_Z,
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,

/* flag outputs common to every memory-access suffix variant of the
   read-modify-write arithmetic, shift and cmpxchg ops */
#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)


    DEF_WRITEF( )
    DEF_WRITEF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_WRITEF(_kernel)
    DEF_WRITEF(_user)
#endif
};
6613

    
6614
/* simpler form of an operation if no flags need to be generated.
   optimize_flags() substitutes opc_simpler[op] for op when none of the
   flags the op writes is live; optimize_flags_init() fills the unset
   entries so that every op defaults to itself. */
static uint16_t opc_simpler[NB_OPS] = {
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
#if 0
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
#endif

    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)

/* rotate ops map to their flag-less twins for every access suffix */
#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};
6657

    
6658
/* Callback registered with TCG to expand macro ops.  Only the
   MACRO_TEST debugging macro is ever handled; with MACRO_TEST
   undefined this is a no-op. */
static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
{
#ifdef MACRO_TEST
    if (macro_id == MACRO_TEST) {
        tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
    }
#endif
}
6668

    
6669
void optimize_flags_init(void)
6670
{
6671
    int i;
6672
    /* put default values in arrays */
6673
    for(i = 0; i < NB_OPS; i++) {
6674
        if (opc_simpler[i] == 0)
6675
            opc_simpler[i] = i;
6676
    }
6677

    
6678
    tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
6679

    
6680
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
6681
#if TARGET_LONG_BITS > HOST_LONG_BITS
6682
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL, 
6683
                                  TCG_AREG0, offsetof(CPUState, t0), "T0");
6684
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
6685
                                  TCG_AREG0, offsetof(CPUState, t1), "T1");
6686
    cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
6687
                                TCG_AREG0, offsetof(CPUState, t2), "A0");
6688
#else
6689
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
6690
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
6691
    cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
6692
#endif
6693
    /* the helpers are only registered to print debug info */
6694
    TCG_HELPER(helper_divl_EAX_T0);
6695
    TCG_HELPER(helper_idivl_EAX_T0);
6696
}
6697

    
6698
/* CPU flags computation optimization: we move backward thru the
6699
   generated code to see which flags are needed. The operation is
6700
   modified if suitable */
6701
static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6702
{
6703
    uint16_t *opc_ptr;
6704
    int live_flags, write_flags, op;
6705

    
6706
    opc_ptr = opc_buf + opc_buf_len;
6707
    /* live_flags contains the flags needed by the next instructions
6708
       in the code. At the end of the block, we consider that all the
6709
       flags are live. */
6710
    live_flags = CC_OSZAPC;
6711
    while (opc_ptr > opc_buf) {
6712
        op = *--opc_ptr;
6713
        /* if none of the flags written by the instruction is used,
6714
           then we can try to find a simpler instruction */
6715
        write_flags = opc_write_flags[op];
6716
        if ((live_flags & write_flags) == 0) {
6717
            *opc_ptr = opc_simpler[op];
6718
        }
6719
        /* compute the live flags before the instruction */
6720
        live_flags &= ~write_flags;
6721
        live_flags |= opc_read_flags[op];
6722
    }
6723
}
6724

    
6725
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction (used when
   retranslating after a fault to recover eip/cc_op). Returns 0. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int j, lj, cflags;
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* decode the static CPU state (mode bits) out of tb->flags */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    /* direct block chaining is only allowed when no single-step /
       trap-flag / irq-inhibit condition forces a stop per insn */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;

    for(;;) {
        /* emit a debug trap if a breakpoint is set on this insn */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* record pc/cc_op for every emitted op; zero-fill the
               instr-start slots of ops belonging to the previous insn */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP_OPT) {
            fprintf(logfile, "OP before opt:\n");
            tcg_dump_ops(&tcg_ctx, logfile);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
6880

    
6881
/* Translate one basic block without recording per-insn PC info. */
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    const int want_pc_info = 0;

    return gen_intermediate_code_internal(env, tb, want_pc_info);
}
6885

    
6886
/* Translate one basic block, also recording per-insn PC information
   (used when retranslating to locate a faulting instruction). */
int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    const int want_pc_info = 1;

    return gen_intermediate_code_internal(env, tb, want_pc_info);
}
6890

    
6891
/* Restore eip (and cc_op when it was statically known) from the
   per-op arrays filled by a search_pc retranslation; pc_pos indexes
   the op corresponding to the faulting target instruction. */
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    int restored_cc_op;

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP) {
        int k;

        fprintf(logfile, "RESTORE:\n");
        for (k = 0; k <= pc_pos; k++) {
            if (!gen_opc_instr_start[k])
                continue;
            fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", k, gen_opc_pc[k]);
        }
        fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                (uint32_t)tb->cs_base);
    }
#endif
    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
    restored_cc_op = gen_opc_cc_op[pc_pos];
    /* CC_OP_DYNAMIC means cc_op was not known at translation time;
       leave the runtime value untouched in that case */
    if (restored_cc_op != CC_OP_DYNAMIC)
        env->cc_op = restored_cc_op;
}