Statistics
| Branch: | Revision:

root / target-i386 / translate.c @ a35f3ec7

History | View | Annotate | Download (206.8 kB)

1
/*
2
 *  i386 translation
3
 *
4
 *  Copyright (c) 2003 Fabrice Bellard
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, write to the Free Software
18
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
 */
20
#include <stdarg.h>
21
#include <stdlib.h>
22
#include <stdio.h>
23
#include <string.h>
24
#include <inttypes.h>
25
#include <signal.h>
26
#include <assert.h>
27

    
28
#include "cpu.h"
29
#include "exec-all.h"
30
#include "disas.h"
31
#include "helper.h"
32
#include "tcg-op.h"
33

    
34
/* instruction prefix flags accumulated while decoding one insn */
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10

#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
/* 32-bit-only build: 64-bit-only entries compile out to NULL/0 */
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

//#define MACRO_TEST   1

/* global register indexes */
static TCGv cpu_env, cpu_T[2], cpu_A0;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;

#ifdef TARGET_X86_64
/* non-zero when a REX prefix is active, changing byte-register
   numbering (no AH/CH/DH/BH aliasing) */
static int x86_64_hregs;
#endif
68

    
69
/* Decoder state carried across the translation of one basic block. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* segment override register index, -1 if no override */
    int prefix;   /* PREFIX_* flag mask for the current insn */
    int aflag, dflag; /* address / data size (0=16, 1=32, 2=64 bits) */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* REX prefix index/base extension bits */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;    /* current privilege level */
    int iopl;   /* I/O privilege level */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;
    int cpuid_ext_features;
} DisasContext;

/* forward declarations of the main code generators */
static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
108

    
109
/* i386 arith/logic operations */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

/* operand size: 0..3 = byte/word/long/quad */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

enum {
    /* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
156

    
157
/* T0 = 0 */
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

/* T0 = signed 32-bit immediate */
static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

/* T0 = unsigned 32-bit immediate */
static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

/* T1 = signed 32-bit immediate */
static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

/* T1 = unsigned 32-bit immediate */
static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

/* A0 = unsigned 32-bit immediate */
static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
/* A0 = 64-bit immediate */
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

/* T0 = full-width target immediate */
static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

/* T1 = full-width target immediate */
static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

/* T0 &= 0xffff (truncate to 16 bits) */
static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

/* T0 &= val */
static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

/* T0 = T1 */
static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

/* A0 &= 0xffff (16-bit addressing truncation) */
static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
223

    
224
#ifdef TARGET_X86_64

/* number of operand sizes handled by the op tables (b/w/l/q) */
#define NB_OP_SIZES 4

/* expand one table entry per integer register (16 regs on x86_64) */
#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,\
  prefix ## R8 ## suffix,\
  prefix ## R9 ## suffix,\
  prefix ## R10 ## suffix,\
  prefix ## R11 ## suffix,\
  prefix ## R12 ## suffix,\
  prefix ## R13 ## suffix,\
  prefix ## R14 ## suffix,\
  prefix ## R15 ## suffix,

#else /* !TARGET_X86_64 */

/* b/w/l only on 32-bit targets */
#define NB_OP_SIZES 3

/* expand one table entry per integer register (8 regs on i386) */
#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,

#endif /* !TARGET_X86_64 */

/* byte offsets of the 8/16/32-bit sub-registers inside a
   target_ulong register slot, for each host endianness */
#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
275

    
276
/* Store cpu_T[t_index] into guest register 'reg' with operand size 'ot'.
   For byte stores, regs 4..7 address AH/CH/DH/BH (the high byte of
   regs 0..3) unless REX-style register numbering is active. */
static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            /* AH/CH/DH/BH: high byte of the corresponding low register */
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
308

    
309
/* Store T0 into guest register 'reg' with size 'ot'. */
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 0, reg);
}

/* Store T1 into guest register 'reg' with size 'ot'. */
static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 1, reg);
}
318

    
319
/* Store A0 into guest register 'reg'.
   'size' is an address size index: 0=16, 1=32, 2=64 bits. */
static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
344

    
345
/* Load guest register 'reg' into cpu_T[t_index].
   Only the AH/CH/DH/BH byte case needs a special load; every other
   case loads the whole register word (callers then use the low bits
   that are significant for 'ot'). */
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            /* AH/CH/DH/BH: high byte of the corresponding low register */
            tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}
361

    
362
/* A0 = low 32 bits of guest register 'reg' (zero-extended) */
static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}

/* A0 += val, result truncated to 32 bits (32-bit address size) */
static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
/* A0 += val, full 64-bit address arithmetic */
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

/* A0 += val, picking 64- or 32-bit arithmetic from the code size */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
391

    
392
/* T0 += T1 */
static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

/* env->eip = T0 */
static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}

/* SP += val, writing back only the low 16 bits (16-bit stack) */
static inline void gen_op_addw_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]) + REG_W_OFFSET);
}

/* ESP += val, truncated to 32 bits on 64-bit targets */
static inline void gen_op_addl_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}

#ifdef TARGET_X86_64
/* RSP += val, full 64-bit arithmetic */
static inline void gen_op_addq_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}
#endif

/* env->cc_op = val (record the lazily-evaluated condition-code op) */
static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_tl(cpu_tmp0, val);
    tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, cc_op));
}
433

    
434
/* A0 += reg << shift, truncated to 32 bits (scaled-index addressing) */
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0) 
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

/* A0 = low 32 bits of segment base of 'reg' */
static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

/* A0 += segment base of 'reg', truncated to 32 bits */
static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
458

    
459
#ifdef TARGET_X86_64
460
static inline void gen_op_movq_A0_seg(int reg)
461
{
462
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
463
}
464

    
465
static inline void gen_op_addq_A0_seg(int reg)
466
{
467
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
468
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
469
}
470

    
471
static inline void gen_op_movq_A0_reg(int reg)
472
{
473
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
474
}
475

    
476
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
477
{
478
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
479
    if (shift != 0) 
480
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
481
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
482
}
483
#endif
484

    
485
/* CMOV dispatch table, indexed [operand size w/l(/q)][destination reg] */
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_cmovw_, _T1_T0)
    },
    [1] = {
        DEF_REGS(gen_op_cmovl_, _T1_T0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_cmovq_, _T1_T0)
    },
#endif
};
498

    
499
/* expand {ADC, SBB} entries for each of the four operand sizes;
   the quad entries exist only on x86_64 builds */
#define DEF_ARITHC(SUFFIX)\
    {\
        gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
        gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
    },

/* register-destination ADC/SBB, indexed [ot][0=adc,1=sbb] */
static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
    DEF_ARITHC( )
};

/* memory-destination ADC/SBB; softmmu builds add _kernel/_user variants
   after the _raw block (hence the 3 * 4 first dimension) */
static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
    DEF_ARITHC(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_ARITHC(_kernel)
    DEF_ARITHC(_user)
#endif
};

/* CC_OP_* (byte-sized) for each OP_ADDL..OP_CMPL arith operation */
static const int cc_op_arithb[8] = {
    CC_OP_ADDB,
    CC_OP_LOGICB,
    CC_OP_ADDB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
};
539

    
540
/* expand CMPXCHG entries for the four operand sizes (quad = x86_64 only) */
#define DEF_CMPXCHG(SUFFIX)\
    gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
    gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
    X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),

/* register-destination CMPXCHG, indexed by ot */
static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
    DEF_CMPXCHG( )
};

/* memory-destination CMPXCHG; _kernel/_user variants only in softmmu */
static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
    DEF_CMPXCHG(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_CMPXCHG(_kernel)
    DEF_CMPXCHG(_user)
#endif
};
557

    
558
/* expand the 8 group-2 shift/rotate ops for each operand size.
   Slot 6 (OP_SHL1, the undocumented encoding) reuses the SHL op. */
#define DEF_SHIFT(SUFFIX)\
    {\
        gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
        gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        gen_op_roll ## SUFFIX ## _T0_T1_cc,\
        gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
        gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
        gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
        gen_op_shll ## SUFFIX ## _T0_T1_cc,\
        gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
    },\
    {\
        X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
        X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
    },

/* register-destination shifts, indexed [ot][OP_ROL..OP_SAR] */
static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
    DEF_SHIFT( )
};

/* memory-destination shifts; _kernel/_user variants only in softmmu */
static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
    DEF_SHIFT(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFT(_kernel)
    DEF_SHIFT(_user)
#endif
};
611

    
612
/* expand SHLD/SHRD entries per operand size; there is no byte form,
   so the OT_BYTE row is NULL */
#define DEF_SHIFTD(SUFFIX, op)\
    {\
        NULL,\
        NULL,\
    },\
    {\
        gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
        gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
     },\
    {\
        gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
        gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    },\
    {\
X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
           gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
    },

/* SHLD/SHRD with immediate count, register destination */
static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
    DEF_SHIFTD(, im)
};

/* SHLD/SHRD with CL count, register destination */
static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
    DEF_SHIFTD(, ECX)
};

/* memory-destination variants; _kernel/_user only in softmmu builds */
static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, im)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, im)
    DEF_SHIFTD(_user, im)
#endif
};

static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, ECX)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, ECX)
    DEF_SHIFTD(_user, ECX)
#endif
};
653

    
654
/* BT/BTS/BTR/BTC, indexed [ot - OT_WORD][0=bt,1=bts,2=btr,3=btc] */
static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
    [0] = {
        gen_op_btw_T0_T1_cc,
        gen_op_btsw_T0_T1_cc,
        gen_op_btrw_T0_T1_cc,
        gen_op_btcw_T0_T1_cc,
    },
    [1] = {
        gen_op_btl_T0_T1_cc,
        gen_op_btsl_T0_T1_cc,
        gen_op_btrl_T0_T1_cc,
        gen_op_btcl_T0_T1_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_btq_T0_T1_cc,
        gen_op_btsq_T0_T1_cc,
        gen_op_btrq_T0_T1_cc,
        gen_op_btcq_T0_T1_cc,
    },
#endif
};

/* add the bit-offset word address (T1) to A0 for memory BT ops */
static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
    gen_op_add_bitw_A0_T1,
    gen_op_add_bitl_A0_T1,
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
};

/* BSF/BSR, indexed [ot - OT_WORD][0=bsf,1=bsr] */
static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
    [0] = {
        gen_op_bsfw_T0_cc,
        gen_op_bsrw_T0_cc,
    },
    [1] = {
        gen_op_bsfl_T0_cc,
        gen_op_bsrl_T0_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_bsfq_T0_cc,
        gen_op_bsrq_T0_cc,
    },
#endif
};
699

    
700
static inline void gen_op_lds_T0_A0(int idx)
701
{
702
    int mem_index = (idx >> 2) - 1;
703
    switch(idx & 3) {
704
    case 0:
705
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
706
        break;
707
    case 1:
708
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
709
        break;
710
    default:
711
    case 2:
712
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
713
        break;
714
    }
715
}
716

    
717
/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
/* Zero-extending load of the value at address A0 into T0; 'idx' packs
   the size (low 2 bits: b/w/l/q) and mem_index + 1 (upper bits). */
static inline void gen_op_ld_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

/* alias kept for callers that explicitly want the unsigned load */
static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_T0_A0(idx);
}
742

    
743
/* Zero-extending load of the value at address A0 into T1 (same idx
   encoding as gen_op_ld_T0_A0). */
static inline void gen_op_ld_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}
762

    
763
/* Store T0 at address A0 (idx encodes size and mem_index as above). */
static inline void gen_op_st_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

/* Store T1 at address A0. */
static inline void gen_op_st_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}
802

    
803
/* env->eip = pc (set the guest instruction pointer to an immediate) */
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
808

    
809
/* A0 = segmented source address DS:ESI (honoring a segment override)
   for string instructions, per the current address size. */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64 bit address: segment base only added for an override */
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 bit address: the segment base is always added */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}
843

    
844
/* A0 = destination address ES:EDI for string instructions
   (ES cannot be overridden), per the current address size. */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64 bit address: no segment base */
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        /* 32 bit address: add ES base only when segments have bases */
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        /* 16 bit address: ES base always added */
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
864

    
865
/* T0 = string-op increment for each operand size (direction-flag
   dependent step used to advance ESI/EDI); indexed by ot */
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
    gen_op_movl_T0_Dshiftb,
    gen_op_movl_T0_Dshiftw,
    gen_op_movl_T0_Dshiftl,
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
};

/* conditional jumps on (E/R)CX != 0, indexed by address size (aflag) */
static GenOpFunc1 *gen_op_jnz_ecx[3] = {
    gen_op_jnz_ecxw,
    gen_op_jnz_ecxl,
    X86_64_ONLY(gen_op_jnz_ecxq),
};

/* conditional jumps on (E/R)CX == 0, indexed by address size (aflag) */
static GenOpFunc1 *gen_op_jz_ecx[3] = {
    gen_op_jz_ecxw,
    gen_op_jz_ecxl,
    X86_64_ONLY(gen_op_jz_ecxq),
};

/* decrement (E/R)CX, indexed by address size (aflag) */
static GenOpFunc *gen_op_dec_ECX[3] = {
    gen_op_decw_ECX,
    gen_op_decl_ECX,
    X86_64_ONLY(gen_op_decq_ECX),
};

/* REPNZ/REPZ termination tests on the last compare result,
   indexed [0=repnz(jnz), 1=repz(jz)][ot] */
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};
904

    
905
/* IN from port DX into T0, indexed by operand size (b/w/l) */
static GenOpFunc *gen_op_in_DX_T0[3] = {
    gen_op_inb_DX_T0,
    gen_op_inw_DX_T0,
    gen_op_inl_DX_T0,
};

/* OUT T0 to port DX, indexed by operand size */
static GenOpFunc *gen_op_out_DX_T0[3] = {
    gen_op_outb_DX_T0,
    gen_op_outw_DX_T0,
    gen_op_outl_DX_T0,
};

/* IN from port T0 into T1, indexed by operand size */
static GenOpFunc *gen_op_in[3] = {
    gen_op_inb_T0_T1,
    gen_op_inw_T0_T1,
    gen_op_inl_T0_T1,
};

/* OUT T1 to port T0, indexed by operand size */
static GenOpFunc *gen_op_out[3] = {
    gen_op_outb_T0_T1,
    gen_op_outw_T0_T1,
    gen_op_outl_T0_T1,
};

/* I/O permission checks for a port number in T0 */
static GenOpFunc *gen_check_io_T0[3] = {
    gen_op_check_iob_T0,
    gen_op_check_iow_T0,
    gen_op_check_iol_T0,
};

/* I/O permission checks for the port number in DX */
static GenOpFunc *gen_check_io_DX[3] = {
    gen_op_check_iob_DX,
    gen_op_check_iow_DX,
    gen_op_check_iol_DX,
};
940

    
941
/* Emit an I/O permission check for an IN/OUT of size 'ot'.
   Only needed in protected mode when CPL > IOPL, or in vm86 mode;
   'use_dx' selects the DX-port variant over the T0-port one, and
   cur_eip is stored first so a fault reports the right EIP. */
static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
{
    if (!s->pe || (s->cpl <= s->iopl && !s->vm86))
        return;

    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    if (use_dx) {
        gen_check_io_DX[ot]();
    } else {
        gen_check_io_T0[ot]();
    }
}
953

    
954
/* MOVS: copy one element from [seg:ESI] to [ES:EDI], then advance
   ESI and EDI by the direction-flag-dependent step. */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
975

    
976
/* Flush the lazily-tracked condition-code state to env->cc_op and
   mark it dynamic so later code re-reads it at runtime. */
static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op == CC_OP_DYNAMIC)
        return;
    gen_op_set_cc_op(s->cc_op);
    s->cc_op = CC_OP_DYNAMIC;
}
983

    
984
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
/* Emit "if ECX == 0 jump to next_eip" for REP prefixes: falls through
   to label l2 (which exits the loop) when ECX is zero, otherwise
   continues at l1. Returns l2 so the caller can branch back to the
   exit path. */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
998

    
999
/* STOS: store AL/AX/EAX at [ES:EDI], then advance EDI. */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}

/* LODS: load [seg:ESI] into AL/AX/EAX, then advance ESI. */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}

/* SCAS: compare AL/AX/EAX with [ES:EDI] (flags only), advance EDI. */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1053

    
1054
/* CMPS: compare [seg:ESI] with [ES:EDI] (flags only), advance both. */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}

/* INS: read from port DX into [ES:EDI], then advance EDI.
   A zero is stored first so a write page fault is taken before
   the port is actually read. */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_in_DX_T0[ot]();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}

/* OUTS: write [seg:ESI] to port DX, then advance ESI. */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_out_DX_T0[ot]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1114

    
1115
/* same method as Valgrind : we generate jumps to current or next
   instruction */
/* Expand to a gen_repz_<op> helper that wraps one string-op iteration
   in the REP loop skeleton (decrement ECX, jump back to cur_eip). */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
1132

    
1133
/* Like GEN_REPZ, but for SCAS/CMPS which also terminate on the ZF
   condition; 'nz' selects REPZ vs REPNZ semantics. */
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);\
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
1150

    
1151
GEN_REPZ(movs)
1152
GEN_REPZ(stos)
1153
GEN_REPZ(lods)
1154
GEN_REPZ(ins)
1155
GEN_REPZ(outs)
1156
GEN_REPZ2(scas)
1157
GEN_REPZ2(cmps)
1158

    
1159
/* Condition-code indexes for Jcc/SETcc: bit 0 of the opcode inverts,
   bits 3..1 select one of these eight base conditions. */
enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};
1169

    
1170
static GenOpFunc1 *gen_jcc_sub[4][8] = {
1171
    [OT_BYTE] = {
1172
        NULL,
1173
        gen_op_jb_subb,
1174
        gen_op_jz_subb,
1175
        gen_op_jbe_subb,
1176
        gen_op_js_subb,
1177
        NULL,
1178
        gen_op_jl_subb,
1179
        gen_op_jle_subb,
1180
    },
1181
    [OT_WORD] = {
1182
        NULL,
1183
        gen_op_jb_subw,
1184
        gen_op_jz_subw,
1185
        gen_op_jbe_subw,
1186
        gen_op_js_subw,
1187
        NULL,
1188
        gen_op_jl_subw,
1189
        gen_op_jle_subw,
1190
    },
1191
    [OT_LONG] = {
1192
        NULL,
1193
        gen_op_jb_subl,
1194
        gen_op_jz_subl,
1195
        gen_op_jbe_subl,
1196
        gen_op_js_subl,
1197
        NULL,
1198
        gen_op_jl_subl,
1199
        gen_op_jle_subl,
1200
    },
1201
#ifdef TARGET_X86_64
1202
    [OT_QUAD] = {
1203
        NULL,
1204
        BUGGY_64(gen_op_jb_subq),
1205
        gen_op_jz_subq,
1206
        BUGGY_64(gen_op_jbe_subq),
1207
        gen_op_js_subq,
1208
        NULL,
1209
        BUGGY_64(gen_op_jl_subq),
1210
        BUGGY_64(gen_op_jle_subq),
1211
    },
1212
#endif
1213
};
1214
static GenOpFunc1 *gen_op_loop[3][4] = {
1215
    [0] = {
1216
        gen_op_loopnzw,
1217
        gen_op_loopzw,
1218
        gen_op_jnz_ecxw,
1219
    },
1220
    [1] = {
1221
        gen_op_loopnzl,
1222
        gen_op_loopzl,
1223
        gen_op_jnz_ecxl,
1224
    },
1225
#ifdef TARGET_X86_64
1226
    [2] = {
1227
        gen_op_loopnzq,
1228
        gen_op_loopzq,
1229
        gen_op_jnz_ecxq,
1230
    },
1231
#endif
1232
};
1233

    
1234
static GenOpFunc *gen_setcc_slow[8] = {
1235
    gen_op_seto_T0_cc,
1236
    gen_op_setb_T0_cc,
1237
    gen_op_setz_T0_cc,
1238
    gen_op_setbe_T0_cc,
1239
    gen_op_sets_T0_cc,
1240
    gen_op_setp_T0_cc,
1241
    gen_op_setl_T0_cc,
1242
    gen_op_setle_T0_cc,
1243
};
1244

    
1245
static GenOpFunc *gen_setcc_sub[4][8] = {
1246
    [OT_BYTE] = {
1247
        NULL,
1248
        gen_op_setb_T0_subb,
1249
        gen_op_setz_T0_subb,
1250
        gen_op_setbe_T0_subb,
1251
        gen_op_sets_T0_subb,
1252
        NULL,
1253
        gen_op_setl_T0_subb,
1254
        gen_op_setle_T0_subb,
1255
    },
1256
    [OT_WORD] = {
1257
        NULL,
1258
        gen_op_setb_T0_subw,
1259
        gen_op_setz_T0_subw,
1260
        gen_op_setbe_T0_subw,
1261
        gen_op_sets_T0_subw,
1262
        NULL,
1263
        gen_op_setl_T0_subw,
1264
        gen_op_setle_T0_subw,
1265
    },
1266
    [OT_LONG] = {
1267
        NULL,
1268
        gen_op_setb_T0_subl,
1269
        gen_op_setz_T0_subl,
1270
        gen_op_setbe_T0_subl,
1271
        gen_op_sets_T0_subl,
1272
        NULL,
1273
        gen_op_setl_T0_subl,
1274
        gen_op_setle_T0_subl,
1275
    },
1276
#ifdef TARGET_X86_64
1277
    [OT_QUAD] = {
1278
        NULL,
1279
        gen_op_setb_T0_subq,
1280
        gen_op_setz_T0_subq,
1281
        gen_op_setbe_T0_subq,
1282
        gen_op_sets_T0_subq,
1283
        NULL,
1284
        gen_op_setl_T0_subq,
1285
        gen_op_setle_T0_subq,
1286
    },
1287
#endif
1288
};
1289

    
1290
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
1291
    gen_op_fadd_ST0_FT0,
1292
    gen_op_fmul_ST0_FT0,
1293
    gen_op_fcom_ST0_FT0,
1294
    gen_op_fcom_ST0_FT0,
1295
    gen_op_fsub_ST0_FT0,
1296
    gen_op_fsubr_ST0_FT0,
1297
    gen_op_fdiv_ST0_FT0,
1298
    gen_op_fdivr_ST0_FT0,
1299
};
1300

    
1301
/* NOTE the exception in "r" op ordering */
1302
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
1303
    gen_op_fadd_STN_ST0,
1304
    gen_op_fmul_STN_ST0,
1305
    NULL,
1306
    NULL,
1307
    gen_op_fsubr_STN_ST0,
1308
    gen_op_fsub_STN_ST0,
1309
    gen_op_fdivr_STN_ST0,
1310
    gen_op_fdiv_STN_ST0,
1311
};
1312

    
1313
/* if d == OR_TMP0, it means memory operand (address in A0) */
1314
static void gen_op(DisasContext *s1, int op, int ot, int d)
1315
{
1316
    GenOpFunc *gen_update_cc;
1317

    
1318
    if (d != OR_TMP0) {
1319
        gen_op_mov_TN_reg(ot, 0, d);
1320
    } else {
1321
        gen_op_ld_T0_A0(ot + s1->mem_index);
1322
    }
1323
    switch(op) {
1324
    case OP_ADCL:
1325
    case OP_SBBL:
1326
        if (s1->cc_op != CC_OP_DYNAMIC)
1327
            gen_op_set_cc_op(s1->cc_op);
1328
        if (d != OR_TMP0) {
1329
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
1330
            gen_op_mov_reg_T0(ot, d);
1331
        } else {
1332
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
1333
        }
1334
        s1->cc_op = CC_OP_DYNAMIC;
1335
        goto the_end;
1336
    case OP_ADDL:
1337
        gen_op_addl_T0_T1();
1338
        s1->cc_op = CC_OP_ADDB + ot;
1339
        gen_update_cc = gen_op_update2_cc;
1340
        break;
1341
    case OP_SUBL:
1342
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1343
        s1->cc_op = CC_OP_SUBB + ot;
1344
        gen_update_cc = gen_op_update2_cc;
1345
        break;
1346
    default:
1347
    case OP_ANDL:
1348
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1349
        s1->cc_op = CC_OP_LOGICB + ot;
1350
        gen_update_cc = gen_op_update1_cc;
1351
        break;
1352
    case OP_ORL:
1353
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1354
        s1->cc_op = CC_OP_LOGICB + ot;
1355
        gen_update_cc = gen_op_update1_cc;
1356
        break;
1357
    case OP_XORL:
1358
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1359
        s1->cc_op = CC_OP_LOGICB + ot;
1360
        gen_update_cc = gen_op_update1_cc;
1361
        break;
1362
    case OP_CMPL:
1363
        gen_op_cmpl_T0_T1_cc();
1364
        s1->cc_op = CC_OP_SUBB + ot;
1365
        gen_update_cc = NULL;
1366
        break;
1367
    }
1368
    if (op != OP_CMPL) {
1369
        if (d != OR_TMP0)
1370
            gen_op_mov_reg_T0(ot, d);
1371
        else
1372
            gen_op_st_T0_A0(ot + s1->mem_index);
1373
    }
1374
    /* the flags update must happen after the memory write (precise
1375
       exception support) */
1376
    if (gen_update_cc)
1377
        gen_update_cc();
1378
 the_end: ;
1379
}
1380

    
1381
/* if d == OR_TMP0, it means memory operand (address in A0) */
1382
static void gen_inc(DisasContext *s1, int ot, int d, int c)
1383
{
1384
    if (d != OR_TMP0)
1385
        gen_op_mov_TN_reg(ot, 0, d);
1386
    else
1387
        gen_op_ld_T0_A0(ot + s1->mem_index);
1388
    if (s1->cc_op != CC_OP_DYNAMIC)
1389
        gen_op_set_cc_op(s1->cc_op);
1390
    if (c > 0) {
1391
        gen_op_incl_T0();
1392
        s1->cc_op = CC_OP_INCB + ot;
1393
    } else {
1394
        gen_op_decl_T0();
1395
        s1->cc_op = CC_OP_DECB + ot;
1396
    }
1397
    if (d != OR_TMP0)
1398
        gen_op_mov_reg_T0(ot, d);
1399
    else
1400
        gen_op_st_T0_A0(ot + s1->mem_index);
1401
    gen_op_update_inc_cc();
1402
}
1403

    
1404
/* Emit code for a shift/rotate 'op' of size 'ot' on destination 'd'
   with the count taken from 's' (or already in T1 if s == OR_TMP1). */
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);
    /* for zero counts, flags are not updated, so must do it dynamically */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);

    if (d != OR_TMP0)
        gen_op_shift_T0_T1_cc[ot][op]();
    else
        gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1424

    
1425
/* Shift by an immediate count 'c': load it into T1 and reuse the
   generic shift path. */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
1431

    
1432
/* Decode the memory operand of a ModR/M byte (16/32/64-bit forms,
   including SIB and displacement) and emit code leaving the effective
   address in A0, with segment base applied when needed.
   Outputs: *reg_ptr = OR_A0, *offset_ptr = 0 (historical interface). */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            /* base == 4 selects a SIB byte */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base: disp32, RIP-relative in 64-bit mode */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0) {
                    gen_op_addq_A0_im(disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* EBP/ESP based addressing defaults to SS */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        /* 16-bit addressing */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* direct disp16 addressing */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        /* the eight 16-bit base+index combinations */
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
1615

    
1616
/* Skip over a ModR/M memory operand without generating any code
   (used for multi-byte NOP and prefetch-style instructions): only
   advances s->pc past SIB and displacement bytes. */
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
    if (mod == 3)
        return;                 /* register operand: nothing to skip */
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit form */
        base = rm;

        if (base == 4) {
            code = ldub_code(s->pc++);   /* SIB byte */
            base = (code & 7);
        }

        switch (mod) {
        case 0:
            if (base == 5) {
                s->pc += 4;     /* disp32 */
            }
            break;
        case 1:
            s->pc++;            /* disp8 */
            break;
        default:
        case 2:
            s->pc += 4;         /* disp32 */
            break;
        }
    } else {
        /* 16-bit form */
        switch (mod) {
        case 0:
            if (rm == 6) {
                s->pc += 2;     /* disp16 */
            }
            break;
        case 1:
            s->pc++;            /* disp8 */
            break;
        default:
        case 2:
            s->pc += 2;         /* disp16 */
            break;
        }
    }
}
1665

    
1666
/* used for LEA and MOV AX, mem */
1667
static void gen_add_A0_ds_seg(DisasContext *s)
1668
{
1669
    int override, must_add_seg;
1670
    must_add_seg = s->addseg;
1671
    override = R_DS;
1672
    if (s->override >= 0) {
1673
        override = s->override;
1674
        must_add_seg = 1;
1675
    } else {
1676
        override = R_DS;
1677
    }
1678
    if (must_add_seg) {
1679
#ifdef TARGET_X86_64
1680
        if (CODE64(s)) {
1681
            gen_op_addq_A0_seg(override);
1682
        } else
1683
#endif
1684
        {
1685
            gen_op_addl_A0_seg(override);
1686
        }
1687
    }
1688
}
1689

    
1690
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1691
   OR_TMP0 */
1692
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1693
{
1694
    int mod, rm, opreg, disp;
1695

    
1696
    mod = (modrm >> 6) & 3;
1697
    rm = (modrm & 7) | REX_B(s);
1698
    if (mod == 3) {
1699
        if (is_store) {
1700
            if (reg != OR_TMP0)
1701
                gen_op_mov_TN_reg(ot, 0, reg);
1702
            gen_op_mov_reg_T0(ot, rm);
1703
        } else {
1704
            gen_op_mov_TN_reg(ot, 0, rm);
1705
            if (reg != OR_TMP0)
1706
                gen_op_mov_reg_T0(ot, reg);
1707
        }
1708
    } else {
1709
        gen_lea_modrm(s, modrm, &opreg, &disp);
1710
        if (is_store) {
1711
            if (reg != OR_TMP0)
1712
                gen_op_mov_TN_reg(ot, 0, reg);
1713
            gen_op_st_T0_A0(ot + s->mem_index);
1714
        } else {
1715
            gen_op_ld_T0_A0(ot + s->mem_index);
1716
            if (reg != OR_TMP0)
1717
                gen_op_mov_reg_T0(ot, reg);
1718
        }
1719
    }
1720
}
1721

    
1722
/* Fetch an immediate of size 'ot' from the instruction stream and
   advance s->pc accordingly. */
static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t ret;

    switch(ot) {
    case OT_BYTE:
        ret = ldub_code(s->pc);
        s->pc++;
        break;
    case OT_WORD:
        ret = lduw_code(s->pc);
        s->pc += 2;
        break;
    default:
    case OT_LONG:
        ret = ldl_code(s->pc);
        s->pc += 4;
        break;
    }
    return ret;
}
1743

    
1744
static inline int insn_const_size(unsigned int ot)
1745
{
1746
    if (ot <= OT_LONG)
1747
        return 1 << ot;
1748
    else
1749
        return 4;
1750
}
1751

    
1752
/* Emit a jump to 'eip': a direct chained TB jump when the target lies
   on one of the pages this TB already spans, otherwise an end-of-block
   exit. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
1772

    
1773
/* Emit code for a conditional jump: condition encoded in 'b'
   (bit 0 inverts), taken target 'val', fall-through 'next_eip'.
   Uses fast flag tests when the lazy cc state allows, otherwise the
   slow EFLAGS-based path. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            /* only ZF and SF tests look at the result value alone;
               the size index is recovered modulo 4 */
            switch(jcc_op) {
            case JCC_Z:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            /* slow path: compute the condition into T0 */
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        if (inv) {
            /* inverted condition: swap taken and fall-through */
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {
        /* no TB chaining: always go through the slow condition */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
1901

    
1902
/* Emit code for SETcc: compute the condition encoded in 'b' into T0
   (0 or 1), using the fast per-cc_op tables when possible. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        /* only ZF and SF depend on the result value alone */
        switch(jcc_op) {
        case JCC_Z:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}
1968

    
1969
/* move T0 to seg_reg and compute if the CPU state may change. Never
1970
   call this function with seg_reg == R_CS */
1971
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
1972
{
1973
    if (s->pe && !s->vm86) {
1974
        /* XXX: optimize by finding processor state dynamically */
1975
        if (s->cc_op != CC_OP_DYNAMIC)
1976
            gen_op_set_cc_op(s->cc_op);
1977
        gen_jmp_im(cur_eip);
1978
        gen_op_movl_seg_T0(seg_reg);
1979
        /* abort translation because the addseg value may change or
1980
           because ss32 may change. For R_SS, translation must always
1981
           stop as a special handling must be done to disable hardware
1982
           interrupts for the next instruction */
1983
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
1984
            s->is_jmp = 3;
1985
    } else {
1986
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
1987
        if (seg_reg == R_SS)
1988
            s->is_jmp = 3;
1989
    }
1990
}
1991

    
1992
/* Load a 64-bit SVM exit-info immediate into T1. */
#define SVM_movq_T1_im(x) gen_movtl_T1_im(x)
1993

    
1994
static inline int
1995
gen_svm_check_io(DisasContext *s, target_ulong pc_start, uint64_t type)
1996
{
1997
#if !defined(CONFIG_USER_ONLY)
1998
    if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
1999
        if (s->cc_op != CC_OP_DYNAMIC)
2000
            gen_op_set_cc_op(s->cc_op);
2001
        SVM_movq_T1_im(s->pc - s->cs_base);
2002
        gen_jmp_im(pc_start - s->cs_base);
2003
        gen_op_geneflags();
2004
        gen_op_svm_check_intercept_io((uint32_t)(type >> 32), (uint32_t)type);
2005
        s->cc_op = CC_OP_DYNAMIC;
2006
        /* FIXME: maybe we could move the io intercept vector to the TB as well
2007
                  so we know if this is an EOB or not ... let's assume it's not
2008
                  for now. */
2009
    }
2010
#endif
2011
    return 0;
2012
}
2013

    
2014
static inline int svm_is_rep(int prefixes)
2015
{
2016
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2017
}
2018

    
2019
static inline int
2020
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2021
                              uint64_t type, uint64_t param)
2022
{
2023
    if(!(s->flags & (INTERCEPT_SVM_MASK)))
2024
        /* no SVM activated */
2025
        return 0;
2026
    switch(type) {
2027
        /* CRx and DRx reads/writes */
2028
        case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
2029
            if (s->cc_op != CC_OP_DYNAMIC) {
2030
                gen_op_set_cc_op(s->cc_op);
2031
                s->cc_op = CC_OP_DYNAMIC;
2032
            }
2033
            gen_jmp_im(pc_start - s->cs_base);
2034
            SVM_movq_T1_im(param);
2035
            gen_op_geneflags();
2036
            gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
2037
            /* this is a special case as we do not know if the interception occurs
2038
               so we assume there was none */
2039
            return 0;
2040
        case SVM_EXIT_MSR:
2041
            if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
2042
                if (s->cc_op != CC_OP_DYNAMIC) {
2043
                    gen_op_set_cc_op(s->cc_op);
2044
                    s->cc_op = CC_OP_DYNAMIC;
2045
                }
2046
                gen_jmp_im(pc_start - s->cs_base);
2047
                SVM_movq_T1_im(param);
2048
                gen_op_geneflags();
2049
                gen_op_svm_check_intercept_param((uint32_t)(type >> 32), (uint32_t)type);
2050
                /* this is a special case as we do not know if the interception occurs
2051
                   so we assume there was none */
2052
                return 0;
2053
            }
2054
            break;
2055
        default:
2056
            if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
2057
                if (s->cc_op != CC_OP_DYNAMIC) {
2058
                    gen_op_set_cc_op(s->cc_op);
2059
                    s->cc_op = CC_OP_EFLAGS;
2060
                }
2061
                gen_jmp_im(pc_start - s->cs_base);
2062
                SVM_movq_T1_im(param);
2063
                gen_op_geneflags();
2064
                gen_op_svm_vmexit(type >> 32, type);
2065
                /* we can optimize this one so TBs don't get longer
2066
                   than up to vmexit */
2067
                gen_eob(s);
2068
                return 1;
2069
            }
2070
    }
2071
    return 0;
2072
}
2073

    
2074
static inline int
2075
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2076
{
2077
    return gen_svm_check_intercept_param(s, pc_start, type, 0);
2078
}
2079

    
2080
/* Adjust ESP/RSP by 'addend' using the stack width implied by the
   current code/stack-size flags. */
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_addq_ESP_im(addend);
    } else
#endif
    if (s->ss32) {
        gen_op_addl_ESP_im(addend);
    } else {
        gen_op_addw_ESP_im(addend);
    }
}
2093

    
2094
/* generate a push of T0. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        /* long mode: RSP-relative, no segment base; operand size is
           8 bytes unless an operand-size prefix selected 16 bits
           (s->dflag == 0) */
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                /* save the unsegmented new ESP in T1 before adding
                   the SS base to the store address */
                gen_op_movl_T1_A0();
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            /* 16-bit stack: wrap the offset to 64K, then apply SS */
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        /* write back ESP: A0 still holds it when no segment base was
           added, otherwise the saved copy in T1 is used */
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
2133

    
2134
/* generate a push. It depends on ss32, addseg and dflag */
2135
/* slower version for T1, only used for call Ev */
2136
static void gen_push_T1(DisasContext *s)
2137
{
2138
#ifdef TARGET_X86_64
2139
    if (CODE64(s)) {
2140
        gen_op_movq_A0_reg(R_ESP);
2141
        if (s->dflag) {
2142
            gen_op_addq_A0_im(-8);
2143
            gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2144
        } else {
2145
            gen_op_addq_A0_im(-2);
2146
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
2147
        }
2148
        gen_op_mov_reg_A0(2, R_ESP);
2149
    } else
2150
#endif
2151
    {
2152
        gen_op_movl_A0_reg(R_ESP);
2153
        if (!s->dflag)
2154
            gen_op_addl_A0_im(-2);
2155
        else
2156
            gen_op_addl_A0_im(-4);
2157
        if (s->ss32) {
2158
            if (s->addseg) {
2159
                gen_op_addl_A0_seg(R_SS);
2160
            }
2161
        } else {
2162
            gen_op_andl_A0_ffff();
2163
            gen_op_addl_A0_seg(R_SS);
2164
        }
2165
        gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2166

    
2167
        if (s->ss32 && !s->addseg)
2168
            gen_op_mov_reg_A0(1, R_ESP);
2169
        else
2170
            gen_stack_update(s, (-2) << s->dflag);
2171
    }
2172
}
2173

    
2174
/* Load the stack top into T0 without adjusting ESP; the caller later
   invokes gen_pop_update(). Two step pop is necessary for precise
   exceptions: ESP must not change if the load faults. */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        /* long mode: no segment base; 8-byte operand unless a prefix
           selected 16 bits (s->dflag == 0) */
        gen_op_movq_A0_reg(R_ESP);
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            /* 16-bit stack: wrap the offset to 64K, then apply SS */
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
    }
}
2195

    
2196
/* Second half of a pop: advance the stack pointer past the value that
   gen_pop_T0() already loaded (8 bytes in 64-bit operand size,
   otherwise 2 << dflag). */
static void gen_pop_update(DisasContext *s)
{
    int addend = 2 << s->dflag;
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag)
        addend = 8;
#endif
    gen_stack_update(s, addend);
}
2207

    
2208
/* Compute the linear address of the stack top into A0, keeping the
   unsegmented ESP value in T1 for later write-back by the caller. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();  /* 16-bit stack: wrap offset to 64K */
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
2217

    
2218
/* PUSHA/PUSHAD: push all eight general registers (EAX..EDI, index
   7 - i walks them in reverse order).  T1 keeps the final unsegmented
   ESP for the write-back at the end.
   NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-16 <<  s->dflag);  /* 8 slots of 2 or 4 bytes */
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
2236

    
2237
/* POPA/POPAD: pop all general registers in reverse order (7 - i).
   T1 accumulates the final unsegmented ESP for write-back.
   NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    gen_op_addl_T1_im(16 <<  s->dflag);  /* ESP after popping 8 slots */
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded (i == 3 is the R_ESP slot) */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
2258

    
2259
/* ENTER instruction: allocate a stack frame of 'esp_addend' bytes and
   copy 'level' (masked to 5 bits, per the ISA) frame pointers.
   T1 tracks the new frame pointer / final ESP value. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;  /* hardware masks the nesting level to 0..31 */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();  /* T1 = new RBP (RSP after pushing RBP) */

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            gen_op_enter64_level(level, (ot == OT_QUAD));
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        /* RSP = new RBP - frame size - space for copied pointers */
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        gen_op_movl_T1_A0();  /* T1 = new EBP (unsegmented) */
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            gen_op_enter_level(level, s->dflag);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        /* ESP = new EBP - frame size - space for copied pointers */
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
2306

    
2307
/* Raise exception 'trapno' at guest EIP 'cur_eip': flush the lazy
   condition codes, resynchronize EIP, then emit the raise op and end
   the translation block (is_jmp = 3). */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;
}
2315

    
2316
/* an interrupt is different from an exception because of the
   privilege checks; next_eip - cur_eip (the instruction length) is
   passed so the helper can compute the return address */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3;
}
2327

    
2328
/* Emit a debug trap at guest EIP 'cur_eip' and end the translation
   block, flushing lazy condition codes first. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;
}
2336

    
2337
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    /* clear the IRQ-inhibit state set by MOV SS / STI */
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        gen_op_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        /* debugger single-step: trap instead of chaining TBs */
        gen_op_debug();
    } else if (s->tf) {
        /* EFLAGS.TF single-step */
        gen_op_single_step();
    } else {
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = 3;
}
2355

    
2356
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        /* direct block chaining is allowed: flush cc state and link */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        /* fall back to a plain EIP update + end of block */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2372

    
2373
/* Jump to 'eip' using direct-chaining slot 0. */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2377

    
2378
/* 64-bit env load from [A0], indexed by s->mem_index >> 2
   (raw / kernel / user access variant). */
static GenOpFunc1 *gen_ldq_env_A0[3] = {
    gen_op_ldq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldq_kernel_env_A0,
    gen_op_ldq_user_env_A0,
#endif
};
2385

    
2386
/* 64-bit env store to [A0], indexed by s->mem_index >> 2
   (raw / kernel / user access variant). */
static GenOpFunc1 *gen_stq_env_A0[3] = {
    gen_op_stq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_stq_kernel_env_A0,
    gen_op_stq_user_env_A0,
#endif
};
2393

    
2394
/* 128-bit (octword) env load from [A0], indexed by s->mem_index >> 2
   (raw / kernel / user access variant). */
static GenOpFunc1 *gen_ldo_env_A0[3] = {
    gen_op_ldo_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldo_kernel_env_A0,
    gen_op_ldo_user_env_A0,
#endif
};
2401

    
2402
/* 128-bit (octword) env store to [A0], indexed by s->mem_index >> 2
   (raw / kernel / user access variant). */
static GenOpFunc1 *gen_sto_env_A0[3] = {
    gen_op_sto_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_sto_kernel_env_A0,
    gen_op_sto_user_env_A0,
#endif
};
2409

    
2410
/* Sentinel table entries: SSE_SPECIAL marks opcodes that need custom
   decoding in gen_sse(); SSE_DUMMY marks valid opcodes with no op
   (emms/femms are handled before table dispatch). */
#define SSE_SPECIAL ((GenOpFunc2 *)1)
#define SSE_DUMMY ((GenOpFunc2 *)2)

/* MMX_OP2 builds the { mmx, xmm } entry pair for an op with both
   variants; SSE_FOP builds the { ps, pd, ss, sd } float op group. */
#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
2416

    
2417
/* Main 0F-prefixed MMX/SSE dispatch table, indexed by [opcode byte]
   [prefix: 0 = none, 1 = 66h, 2 = F3h, 3 = F2h] (see b1 in gen_sse).
   NULL entries are illegal opcodes for that prefix combination. */
static GenOpFunc2 *sse_op_table1[256][4] = {
    /* 3DNow! extensions */
    [0x0e] = { SSE_DUMMY }, /* femms */
    [0x0f] = { SSE_DUMMY }, /* pf... */
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
    [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
    [0x2f] = { gen_op_comiss, gen_op_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
    [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
    [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
    [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
    [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
               gen_op_cvtss2sd, gen_op_cvtsd2ss },
    [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
    [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movqdu */
    [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
               (GenOpFunc2 *)gen_op_pshufd_xmm,
               (GenOpFunc2 *)gen_op_pshufhw_xmm,
               (GenOpFunc2 *)gen_op_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
    [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2542

    
2543
/* Immediate-form shift ops (0F 71/72/73): rows grouped by element
   width (0 = word, 8 = dword, 16 = qword) plus the /reg field;
   columns are the MMX and XMM variants. */
static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, gen_op_psrldq_xmm },  /* byte shift, SSE2 only */
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, gen_op_pslldq_xmm },  /* byte shift, SSE2 only */
};
2555

    
2556
/* Scalar int<->float conversions, in groups of four
   (ss, sd, and their 64-bit integer variants, x86-64 only):
   cvtsi2*, then truncating cvtt*2si, then rounding cvt*2si. */
static GenOpFunc1 *sse_op_table3[4 * 3] = {
    gen_op_cvtsi2ss,
    gen_op_cvtsi2sd,
    X86_64_ONLY(gen_op_cvtsq2ss),
    X86_64_ONLY(gen_op_cvtsq2sd),

    gen_op_cvttss2si,
    gen_op_cvttsd2si,
    X86_64_ONLY(gen_op_cvttss2sq),
    X86_64_ONLY(gen_op_cvttsd2sq),

    gen_op_cvtss2si,
    gen_op_cvtsd2si,
    X86_64_ONLY(gen_op_cvtss2sq),
    X86_64_ONLY(gen_op_cvtsd2sq),
};
2572

    
2573
/* SSE compare ops (cmpps etc.), indexed by the immediate predicate
   value 0..7; each row holds the ps/pd/ss/sd variants. */
static GenOpFunc2 *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2583

    
2584
/* 3DNow! ops, indexed by the instruction's trailing opcode byte;
   NULL entries are illegal. */
static GenOpFunc2 *sse_op_table5[256] = {
    [0x0c] = gen_op_pi2fw,
    [0x0d] = gen_op_pi2fd,
    [0x1c] = gen_op_pf2iw,
    [0x1d] = gen_op_pf2id,
    [0x8a] = gen_op_pfnacc,
    [0x8e] = gen_op_pfpnacc,
    [0x90] = gen_op_pfcmpge,
    [0x94] = gen_op_pfmin,
    [0x96] = gen_op_pfrcp,
    [0x97] = gen_op_pfrsqrt,
    [0x9a] = gen_op_pfsub,
    [0x9e] = gen_op_pfadd,
    [0xa0] = gen_op_pfcmpgt,
    [0xa4] = gen_op_pfmax,
    [0xa6] = gen_op_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = gen_op_movq, /* pfrsqit1 */
    [0xaa] = gen_op_pfsubr,
    [0xae] = gen_op_pfacc,
    [0xb0] = gen_op_pfcmpeq,
    [0xb4] = gen_op_pfmul,
    [0xb6] = gen_op_movq, /* pfrcpit2 */
    [0xb7] = gen_op_pmulhrw_mmx,
    [0xbb] = gen_op_pswapd,
    [0xbf] = gen_op_pavgb_mmx /* pavgusb */
};
2610

    
2611
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2612
{
2613
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
2614
    int modrm, mod, rm, reg, reg_addr, offset_addr;
2615
    GenOpFunc2 *sse_op2;
2616
    GenOpFunc3 *sse_op3;
2617

    
2618
    b &= 0xff;
2619
    if (s->prefix & PREFIX_DATA)
2620
        b1 = 1;
2621
    else if (s->prefix & PREFIX_REPZ)
2622
        b1 = 2;
2623
    else if (s->prefix & PREFIX_REPNZ)
2624
        b1 = 3;
2625
    else
2626
        b1 = 0;
2627
    sse_op2 = sse_op_table1[b][b1];
2628
    if (!sse_op2)
2629
        goto illegal_op;
2630
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
2631
        is_xmm = 1;
2632
    } else {
2633
        if (b1 == 0) {
2634
            /* MMX case */
2635
            is_xmm = 0;
2636
        } else {
2637
            is_xmm = 1;
2638
        }
2639
    }
2640
    /* simple MMX/SSE operation */
2641
    if (s->flags & HF_TS_MASK) {
2642
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2643
        return;
2644
    }
2645
    if (s->flags & HF_EM_MASK) {
2646
    illegal_op:
2647
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2648
        return;
2649
    }
2650
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2651
        goto illegal_op;
2652
    if (b == 0x77 || b == 0x0e) {
2653
        /* emms or femms */
2654
        gen_op_emms();
2655
        return;
2656
    }
2657
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2658
       the static cpu state) */
2659
    if (!is_xmm) {
2660
        gen_op_enter_mmx();
2661
    }
2662

    
2663
    modrm = ldub_code(s->pc++);
2664
    reg = ((modrm >> 3) & 7);
2665
    if (is_xmm)
2666
        reg |= rex_r;
2667
    mod = (modrm >> 6) & 3;
2668
    if (sse_op2 == SSE_SPECIAL) {
2669
        b |= (b1 << 8);
2670
        switch(b) {
2671
        case 0x0e7: /* movntq */
2672
            if (mod == 3)
2673
                goto illegal_op;
2674
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2675
            gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2676
            break;
2677
        case 0x1e7: /* movntdq */
2678
        case 0x02b: /* movntps */
2679
        case 0x12b: /* movntps */
2680
        case 0x3f0: /* lddqu */
2681
            if (mod == 3)
2682
                goto illegal_op;
2683
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2684
            gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2685
            break;
2686
        case 0x6e: /* movd mm, ea */
2687
#ifdef TARGET_X86_64
2688
            if (s->dflag == 2) {
2689
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2690
                gen_op_movq_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2691
            } else
2692
#endif
2693
            {
2694
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2695
                gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2696
            }
2697
            break;
2698
        case 0x16e: /* movd xmm, ea */
2699
#ifdef TARGET_X86_64
2700
            if (s->dflag == 2) {
2701
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2702
                gen_op_movq_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2703
            } else
2704
#endif
2705
            {
2706
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2707
                gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2708
            }
2709
            break;
2710
        case 0x6f: /* movq mm, ea */
2711
            if (mod != 3) {
2712
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2713
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2714
            } else {
2715
                rm = (modrm & 7);
2716
                gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
2717
                            offsetof(CPUX86State,fpregs[rm].mmx));
2718
            }
2719
            break;
2720
        case 0x010: /* movups */
2721
        case 0x110: /* movupd */
2722
        case 0x028: /* movaps */
2723
        case 0x128: /* movapd */
2724
        case 0x16f: /* movdqa xmm, ea */
2725
        case 0x26f: /* movdqu xmm, ea */
2726
            if (mod != 3) {
2727
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2728
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2729
            } else {
2730
                rm = (modrm & 7) | REX_B(s);
2731
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2732
                            offsetof(CPUX86State,xmm_regs[rm]));
2733
            }
2734
            break;
2735
        case 0x210: /* movss xmm, ea */
2736
            if (mod != 3) {
2737
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2738
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
2739
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2740
                gen_op_movl_T0_0();
2741
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2742
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2743
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2744
            } else {
2745
                rm = (modrm & 7) | REX_B(s);
2746
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2747
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2748
            }
2749
            break;
2750
        case 0x310: /* movsd xmm, ea */
2751
            if (mod != 3) {
2752
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2753
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2754
                gen_op_movl_T0_0();
2755
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2756
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2757
            } else {
2758
                rm = (modrm & 7) | REX_B(s);
2759
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2760
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2761
            }
2762
            break;
2763
        case 0x012: /* movlps */
2764
        case 0x112: /* movlpd */
2765
            if (mod != 3) {
2766
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2767
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2768
            } else {
2769
                /* movhlps */
2770
                rm = (modrm & 7) | REX_B(s);
2771
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2772
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2773
            }
2774
            break;
2775
        case 0x212: /* movsldup */
2776
            if (mod != 3) {
2777
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2778
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2779
            } else {
2780
                rm = (modrm & 7) | REX_B(s);
2781
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2782
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2783
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2784
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
2785
            }
2786
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2787
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2788
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2789
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2790
            break;
2791
        case 0x312: /* movddup */
2792
            if (mod != 3) {
2793
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2794
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2795
            } else {
2796
                rm = (modrm & 7) | REX_B(s);
2797
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2798
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2799
            }
2800
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2801
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2802
            break;
2803
        case 0x016: /* movhps */
2804
        case 0x116: /* movhpd */
2805
            if (mod != 3) {
2806
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2807
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2808
            } else {
2809
                /* movlhps */
2810
                rm = (modrm & 7) | REX_B(s);
2811
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2812
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2813
            }
2814
            break;
2815
        case 0x216: /* movshdup */
2816
            if (mod != 3) {
2817
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2818
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2819
            } else {
2820
                rm = (modrm & 7) | REX_B(s);
2821
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2822
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
2823
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2824
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
2825
            }
2826
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2827
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2828
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2829
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2830
            break;
2831
        case 0x7e: /* movd ea, mm */
2832
#ifdef TARGET_X86_64
2833
            if (s->dflag == 2) {
2834
                gen_op_movq_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2835
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2836
            } else
2837
#endif
2838
            {
2839
                gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2840
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2841
            }
2842
            break;
2843
        case 0x17e: /* movd ea, xmm */
2844
#ifdef TARGET_X86_64
2845
            if (s->dflag == 2) {
2846
                gen_op_movq_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2847
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
2848
            } else
2849
#endif
2850
            {
2851
                gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2852
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2853
            }
2854
            break;
2855
        case 0x27e: /* movq xmm, ea */
2856
            if (mod != 3) {
2857
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2858
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2859
            } else {
2860
                rm = (modrm & 7) | REX_B(s);
2861
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2862
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2863
            }
2864
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2865
            break;
2866
        case 0x7f: /* movq ea, mm */
2867
            if (mod != 3) {
2868
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2869
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2870
            } else {
2871
                rm = (modrm & 7);
2872
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2873
                            offsetof(CPUX86State,fpregs[reg].mmx));
2874
            }
2875
            break;
2876
        case 0x011: /* movups */
2877
        case 0x111: /* movupd */
2878
        case 0x029: /* movaps */
2879
        case 0x129: /* movapd */
2880
        case 0x17f: /* movdqa ea, xmm */
2881
        case 0x27f: /* movdqu ea, xmm */
2882
            if (mod != 3) {
2883
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2884
                gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2885
            } else {
2886
                rm = (modrm & 7) | REX_B(s);
2887
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
2888
                            offsetof(CPUX86State,xmm_regs[reg]));
2889
            }
2890
            break;
2891
        case 0x211: /* movss ea, xmm */
2892
            if (mod != 3) {
2893
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2894
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2895
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
2896
            } else {
2897
                rm = (modrm & 7) | REX_B(s);
2898
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
2899
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2900
            }
2901
            break;
2902
        case 0x311: /* movsd ea, xmm */
2903
            if (mod != 3) {
2904
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2905
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2906
            } else {
2907
                rm = (modrm & 7) | REX_B(s);
2908
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2909
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2910
            }
2911
            break;
2912
        case 0x013: /* movlps */
2913
        case 0x113: /* movlpd */
2914
            if (mod != 3) {
2915
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2916
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2917
            } else {
2918
                goto illegal_op;
2919
            }
2920
            break;
2921
        case 0x017: /* movhps */
2922
        case 0x117: /* movhpd */
2923
            if (mod != 3) {
2924
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2925
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2926
            } else {
2927
                goto illegal_op;
2928
            }
2929
            break;
2930
        case 0x71: /* shift mm, im */
2931
        case 0x72:
2932
        case 0x73:
2933
        case 0x171: /* shift xmm, im */
2934
        case 0x172:
2935
        case 0x173:
2936
            val = ldub_code(s->pc++);
2937
            if (is_xmm) {
2938
                gen_op_movl_T0_im(val);
2939
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2940
                gen_op_movl_T0_0();
2941
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
2942
                op1_offset = offsetof(CPUX86State,xmm_t0);
2943
            } else {
2944
                gen_op_movl_T0_im(val);
2945
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
2946
                gen_op_movl_T0_0();
2947
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
2948
                op1_offset = offsetof(CPUX86State,mmx_t0);
2949
            }
2950
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
2951
            if (!sse_op2)
2952
                goto illegal_op;
2953
            if (is_xmm) {
2954
                rm = (modrm & 7) | REX_B(s);
2955
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2956
            } else {
2957
                rm = (modrm & 7);
2958
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2959
            }
2960
            sse_op2(op2_offset, op1_offset);
2961
            break;
2962
        case 0x050: /* movmskps */
2963
            rm = (modrm & 7) | REX_B(s);
2964
            gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
2965
            gen_op_mov_reg_T0(OT_LONG, reg);
2966
            break;
2967
        case 0x150: /* movmskpd */
2968
            rm = (modrm & 7) | REX_B(s);
2969
            gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
2970
            gen_op_mov_reg_T0(OT_LONG, reg);
2971
            break;
2972
        case 0x02a: /* cvtpi2ps */
2973
        case 0x12a: /* cvtpi2pd */
2974
            gen_op_enter_mmx();
2975
            if (mod != 3) {
2976
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2977
                op2_offset = offsetof(CPUX86State,mmx_t0);
2978
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
2979
            } else {
2980
                rm = (modrm & 7);
2981
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2982
            }
2983
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2984
            switch(b >> 8) {
2985
            case 0x0:
2986
                gen_op_cvtpi2ps(op1_offset, op2_offset);
2987
                break;
2988
            default:
2989
            case 0x1:
2990
                gen_op_cvtpi2pd(op1_offset, op2_offset);
2991
                break;
2992
            }
2993
            break;
2994
        case 0x22a: /* cvtsi2ss */
2995
        case 0x32a: /* cvtsi2sd */
2996
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
2997
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
2998
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2999
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
3000
            break;
3001
        case 0x02c: /* cvttps2pi */
3002
        case 0x12c: /* cvttpd2pi */
3003
        case 0x02d: /* cvtps2pi */
3004
        case 0x12d: /* cvtpd2pi */
3005
            gen_op_enter_mmx();
3006
            if (mod != 3) {
3007
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3008
                op2_offset = offsetof(CPUX86State,xmm_t0);
3009
                gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
3010
            } else {
3011
                rm = (modrm & 7) | REX_B(s);
3012
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3013
            }
3014
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3015
            switch(b) {
3016
            case 0x02c:
3017
                gen_op_cvttps2pi(op1_offset, op2_offset);
3018
                break;
3019
            case 0x12c:
3020
                gen_op_cvttpd2pi(op1_offset, op2_offset);
3021
                break;
3022
            case 0x02d:
3023
                gen_op_cvtps2pi(op1_offset, op2_offset);
3024
                break;
3025
            case 0x12d:
3026
                gen_op_cvtpd2pi(op1_offset, op2_offset);
3027
                break;
3028
            }
3029
            break;
3030
        case 0x22c: /* cvttss2si */
3031
        case 0x32c: /* cvttsd2si */
3032
        case 0x22d: /* cvtss2si */
3033
        case 0x32d: /* cvtsd2si */
3034
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3035
            if (mod != 3) {
3036
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3037
                if ((b >> 8) & 1) {
3038
                    gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3039
                } else {
3040
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3041
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3042
                }
3043
                op2_offset = offsetof(CPUX86State,xmm_t0);
3044
            } else {
3045
                rm = (modrm & 7) | REX_B(s);
3046
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3047
            }
3048
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3049
                          (b & 1) * 4](op2_offset);
3050
            gen_op_mov_reg_T0(ot, reg);
3051
            break;
3052
        case 0xc4: /* pinsrw */
3053
        case 0x1c4:
3054
            s->rip_offset = 1;
3055
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3056
            val = ldub_code(s->pc++);
3057
            if (b1) {
3058
                val &= 7;
3059
                gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
3060
            } else {
3061
                val &= 3;
3062
                gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
3063
            }
3064
            break;
3065
        case 0xc5: /* pextrw */
3066
        case 0x1c5:
3067
            if (mod != 3)
3068
                goto illegal_op;
3069
            val = ldub_code(s->pc++);
3070
            if (b1) {
3071
                val &= 7;
3072
                rm = (modrm & 7) | REX_B(s);
3073
                gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
3074
            } else {
3075
                val &= 3;
3076
                rm = (modrm & 7);
3077
                gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
3078
            }
3079
            reg = ((modrm >> 3) & 7) | rex_r;
3080
            gen_op_mov_reg_T0(OT_LONG, reg);
3081
            break;
3082
        case 0x1d6: /* movq ea, xmm */
3083
            if (mod != 3) {
3084
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3085
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3086
            } else {
3087
                rm = (modrm & 7) | REX_B(s);
3088
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3089
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3090
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3091
            }
3092
            break;
3093
        case 0x2d6: /* movq2dq */
3094
            gen_op_enter_mmx();
3095
            rm = (modrm & 7);
3096
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3097
                        offsetof(CPUX86State,fpregs[rm].mmx));
3098
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3099
            break;
3100
        case 0x3d6: /* movdq2q */
3101
            gen_op_enter_mmx();
3102
            rm = (modrm & 7) | REX_B(s);
3103
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3104
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3105
            break;
3106
        case 0xd7: /* pmovmskb */
3107
        case 0x1d7:
3108
            if (mod != 3)
3109
                goto illegal_op;
3110
            if (b1) {
3111
                rm = (modrm & 7) | REX_B(s);
3112
                gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
3113
            } else {
3114
                rm = (modrm & 7);
3115
                gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
3116
            }
3117
            reg = ((modrm >> 3) & 7) | rex_r;
3118
            gen_op_mov_reg_T0(OT_LONG, reg);
3119
            break;
3120
        default:
3121
            goto illegal_op;
3122
        }
3123
    } else {
3124
        /* generic MMX or SSE operation */
3125
        switch(b) {
3126
        case 0xf7:
3127
            /* maskmov : we must prepare A0 */
3128
            if (mod != 3)
3129
                goto illegal_op;
3130
#ifdef TARGET_X86_64
3131
            if (s->aflag == 2) {
3132
                gen_op_movq_A0_reg(R_EDI);
3133
            } else
3134
#endif
3135
            {
3136
                gen_op_movl_A0_reg(R_EDI);
3137
                if (s->aflag == 0)
3138
                    gen_op_andl_A0_ffff();
3139
            }
3140
            gen_add_A0_ds_seg(s);
3141
            break;
3142
        case 0x70: /* pshufx insn */
3143
        case 0xc6: /* pshufx insn */
3144
        case 0xc2: /* compare insns */
3145
            s->rip_offset = 1;
3146
            break;
3147
        default:
3148
            break;
3149
        }
3150
        if (is_xmm) {
3151
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3152
            if (mod != 3) {
3153
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3154
                op2_offset = offsetof(CPUX86State,xmm_t0);
3155
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3156
                                b == 0xc2)) {
3157
                    /* specific case for SSE single instructions */
3158
                    if (b1 == 2) {
3159
                        /* 32 bit access */
3160
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3161
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3162
                    } else {
3163
                        /* 64 bit access */
3164
                        gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3165
                    }
3166
                } else {
3167
                    gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
3168
                }
3169
            } else {
3170
                rm = (modrm & 7) | REX_B(s);
3171
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3172
            }
3173
        } else {
3174
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3175
            if (mod != 3) {
3176
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3177
                op2_offset = offsetof(CPUX86State,mmx_t0);
3178
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
3179
            } else {
3180
                rm = (modrm & 7);
3181
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3182
            }
3183
        }
3184
        switch(b) {
3185
        case 0x0f: /* 3DNow! data insns */
3186
            val = ldub_code(s->pc++);
3187
            sse_op2 = sse_op_table5[val];
3188
            if (!sse_op2)
3189
                goto illegal_op;
3190
            sse_op2(op1_offset, op2_offset);
3191
            break;
3192
        case 0x70: /* pshufx insn */
3193
        case 0xc6: /* pshufx insn */
3194
            val = ldub_code(s->pc++);
3195
            sse_op3 = (GenOpFunc3 *)sse_op2;
3196
            sse_op3(op1_offset, op2_offset, val);
3197
            break;
3198
        case 0xc2:
3199
            /* compare insns */
3200
            val = ldub_code(s->pc++);
3201
            if (val >= 8)
3202
                goto illegal_op;
3203
            sse_op2 = sse_op_table4[val][b1];
3204
            sse_op2(op1_offset, op2_offset);
3205
            break;
3206
        default:
3207
            sse_op2(op1_offset, op2_offset);
3208
            break;
3209
        }
3210
        if (b == 0x2e || b == 0x2f) {
3211
            s->cc_op = CC_OP_EFLAGS;
3212
        }
3213
    }
3214
}
/* convert one instruction. s->is_jmp is set if the translation must
3218
   be stopped. Return the next pc value */
3219
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3220
{
3221
    int b, prefixes, aflag, dflag;
3222
    int shift, ot;
3223
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3224
    target_ulong next_eip, tval;
3225
    int rex_w, rex_r;
3226

    
3227
    s->pc = pc_start;
3228
    prefixes = 0;
3229
    aflag = s->code32;
3230
    dflag = s->code32;
3231
    s->override = -1;
3232
    rex_w = -1;
3233
    rex_r = 0;
3234
#ifdef TARGET_X86_64
3235
    s->rex_x = 0;
3236
    s->rex_b = 0;
3237
    x86_64_hregs = 0;
3238
#endif
3239
    s->rip_offset = 0; /* for relative ip address */
3240
 next_byte:
3241
    b = ldub_code(s->pc);
3242
    s->pc++;
3243
    /* check prefixes */
3244
#ifdef TARGET_X86_64
3245
    if (CODE64(s)) {
3246
        switch (b) {
3247
        case 0xf3:
3248
            prefixes |= PREFIX_REPZ;
3249
            goto next_byte;
3250
        case 0xf2:
3251
            prefixes |= PREFIX_REPNZ;
3252
            goto next_byte;
3253
        case 0xf0:
3254
            prefixes |= PREFIX_LOCK;
3255
            goto next_byte;
3256
        case 0x2e:
3257
            s->override = R_CS;
3258
            goto next_byte;
3259
        case 0x36:
3260
            s->override = R_SS;
3261
            goto next_byte;
3262
        case 0x3e:
3263
            s->override = R_DS;
3264
            goto next_byte;
3265
        case 0x26:
3266
            s->override = R_ES;
3267
            goto next_byte;
3268
        case 0x64:
3269
            s->override = R_FS;
3270
            goto next_byte;
3271
        case 0x65:
3272
            s->override = R_GS;
3273
            goto next_byte;
3274
        case 0x66:
3275
            prefixes |= PREFIX_DATA;
3276
            goto next_byte;
3277
        case 0x67:
3278
            prefixes |= PREFIX_ADR;
3279
            goto next_byte;
3280
        case 0x40 ... 0x4f:
3281
            /* REX prefix */
3282
            rex_w = (b >> 3) & 1;
3283
            rex_r = (b & 0x4) << 1;
3284
            s->rex_x = (b & 0x2) << 2;
3285
            REX_B(s) = (b & 0x1) << 3;
3286
            x86_64_hregs = 1; /* select uniform byte register addressing */
3287
            goto next_byte;
3288
        }
3289
        if (rex_w == 1) {
3290
            /* 0x66 is ignored if rex.w is set */
3291
            dflag = 2;
3292
        } else {
3293
            if (prefixes & PREFIX_DATA)
3294
                dflag ^= 1;
3295
        }
3296
        if (!(prefixes & PREFIX_ADR))
3297
            aflag = 2;
3298
    } else
3299
#endif
3300
    {
3301
        switch (b) {
3302
        case 0xf3:
3303
            prefixes |= PREFIX_REPZ;
3304
            goto next_byte;
3305
        case 0xf2:
3306
            prefixes |= PREFIX_REPNZ;
3307
            goto next_byte;
3308
        case 0xf0:
3309
            prefixes |= PREFIX_LOCK;
3310
            goto next_byte;
3311
        case 0x2e:
3312
            s->override = R_CS;
3313
            goto next_byte;
3314
        case 0x36:
3315
            s->override = R_SS;
3316
            goto next_byte;
3317
        case 0x3e:
3318
            s->override = R_DS;
3319
            goto next_byte;
3320
        case 0x26:
3321
            s->override = R_ES;
3322
            goto next_byte;
3323
        case 0x64:
3324
            s->override = R_FS;
3325
            goto next_byte;
3326
        case 0x65:
3327
            s->override = R_GS;
3328
            goto next_byte;
3329
        case 0x66:
3330
            prefixes |= PREFIX_DATA;
3331
            goto next_byte;
3332
        case 0x67:
3333
            prefixes |= PREFIX_ADR;
3334
            goto next_byte;
3335
        }
3336
        if (prefixes & PREFIX_DATA)
3337
            dflag ^= 1;
3338
        if (prefixes & PREFIX_ADR)
3339
            aflag ^= 1;
3340
    }
3341

    
3342
    s->prefix = prefixes;
3343
    s->aflag = aflag;
3344
    s->dflag = dflag;
3345

    
3346
    /* lock generation */
3347
    if (prefixes & PREFIX_LOCK)
3348
        gen_op_lock();
3349

    
3350
    /* now check op code */
3351
 reswitch:
3352
    switch(b) {
3353
    case 0x0f:
3354
        /**************************/
3355
        /* extended op code */
3356
        b = ldub_code(s->pc++) | 0x100;
3357
        goto reswitch;
3358

    
3359
        /**************************/
3360
        /* arith & logic */
3361
    case 0x00 ... 0x05:
3362
    case 0x08 ... 0x0d:
3363
    case 0x10 ... 0x15:
3364
    case 0x18 ... 0x1d:
3365
    case 0x20 ... 0x25:
3366
    case 0x28 ... 0x2d:
3367
    case 0x30 ... 0x35:
3368
    case 0x38 ... 0x3d:
3369
        {
3370
            int op, f, val;
3371
            op = (b >> 3) & 7;
3372
            f = (b >> 1) & 3;
3373

    
3374
            if ((b & 1) == 0)
3375
                ot = OT_BYTE;
3376
            else
3377
                ot = dflag + OT_WORD;
3378

    
3379
            switch(f) {
3380
            case 0: /* OP Ev, Gv */
3381
                modrm = ldub_code(s->pc++);
3382
                reg = ((modrm >> 3) & 7) | rex_r;
3383
                mod = (modrm >> 6) & 3;
3384
                rm = (modrm & 7) | REX_B(s);
3385
                if (mod != 3) {
3386
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3387
                    opreg = OR_TMP0;
3388
                } else if (op == OP_XORL && rm == reg) {
3389
                xor_zero:
3390
                    /* xor reg, reg optimisation */
3391
                    gen_op_movl_T0_0();
3392
                    s->cc_op = CC_OP_LOGICB + ot;
3393
                    gen_op_mov_reg_T0(ot, reg);
3394
                    gen_op_update1_cc();
3395
                    break;
3396
                } else {
3397
                    opreg = rm;
3398
                }
3399
                gen_op_mov_TN_reg(ot, 1, reg);
3400
                gen_op(s, op, ot, opreg);
3401
                break;
3402
            case 1: /* OP Gv, Ev */
3403
                modrm = ldub_code(s->pc++);
3404
                mod = (modrm >> 6) & 3;
3405
                reg = ((modrm >> 3) & 7) | rex_r;
3406
                rm = (modrm & 7) | REX_B(s);
3407
                if (mod != 3) {
3408
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3409
                    gen_op_ld_T1_A0(ot + s->mem_index);
3410
                } else if (op == OP_XORL && rm == reg) {
3411
                    goto xor_zero;
3412
                } else {
3413
                    gen_op_mov_TN_reg(ot, 1, rm);
3414
                }
3415
                gen_op(s, op, ot, reg);
3416
                break;
3417
            case 2: /* OP A, Iv */
3418
                val = insn_get(s, ot);
3419
                gen_op_movl_T1_im(val);
3420
                gen_op(s, op, ot, OR_EAX);
3421
                break;
3422
            }
3423
        }
3424
        break;
3425

    
3426
    case 0x80: /* GRP1 */
3427
    case 0x81:
3428
    case 0x82:
3429
    case 0x83:
3430
        {
3431
            int val;
3432

    
3433
            if ((b & 1) == 0)
3434
                ot = OT_BYTE;
3435
            else
3436
                ot = dflag + OT_WORD;
3437

    
3438
            modrm = ldub_code(s->pc++);
3439
            mod = (modrm >> 6) & 3;
3440
            rm = (modrm & 7) | REX_B(s);
3441
            op = (modrm >> 3) & 7;
3442

    
3443
            if (mod != 3) {
3444
                if (b == 0x83)
3445
                    s->rip_offset = 1;
3446
                else
3447
                    s->rip_offset = insn_const_size(ot);
3448
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3449
                opreg = OR_TMP0;
3450
            } else {
3451
                opreg = rm;
3452
            }
3453

    
3454
            switch(b) {
3455
            default:
3456
            case 0x80:
3457
            case 0x81:
3458
            case 0x82:
3459
                val = insn_get(s, ot);
3460
                break;
3461
            case 0x83:
3462
                val = (int8_t)insn_get(s, OT_BYTE);
3463
                break;
3464
            }
3465
            gen_op_movl_T1_im(val);
3466
            gen_op(s, op, ot, opreg);
3467
        }
3468
        break;
3469

    
3470
        /**************************/
3471
        /* inc, dec, and other misc arith */
3472
    case 0x40 ... 0x47: /* inc Gv */
3473
        ot = dflag ? OT_LONG : OT_WORD;
3474
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
3475
        break;
3476
    case 0x48 ... 0x4f: /* dec Gv */
3477
        ot = dflag ? OT_LONG : OT_WORD;
3478
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
3479
        break;
3480
    case 0xf6: /* GRP3 */
3481
    case 0xf7:
3482
        if ((b & 1) == 0)
3483
            ot = OT_BYTE;
3484
        else
3485
            ot = dflag + OT_WORD;
3486

    
3487
        modrm = ldub_code(s->pc++);
3488
        mod = (modrm >> 6) & 3;
3489
        rm = (modrm & 7) | REX_B(s);
3490
        op = (modrm >> 3) & 7;
3491
        if (mod != 3) {
3492
            if (op == 0)
3493
                s->rip_offset = insn_const_size(ot);
3494
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3495
            gen_op_ld_T0_A0(ot + s->mem_index);
3496
        } else {
3497
            gen_op_mov_TN_reg(ot, 0, rm);
3498
        }
3499

    
3500
        switch(op) {
3501
        case 0: /* test */
3502
            val = insn_get(s, ot);
3503
            gen_op_movl_T1_im(val);
3504
            gen_op_testl_T0_T1_cc();
3505
            s->cc_op = CC_OP_LOGICB + ot;
3506
            break;
3507
        case 2: /* not */
3508
            gen_op_notl_T0();
3509
            if (mod != 3) {
3510
                gen_op_st_T0_A0(ot + s->mem_index);
3511
            } else {
3512
                gen_op_mov_reg_T0(ot, rm);
3513
            }
3514
            break;
3515
        case 3: /* neg */
3516
            gen_op_negl_T0();
3517
            if (mod != 3) {
3518
                gen_op_st_T0_A0(ot + s->mem_index);
3519
            } else {
3520
                gen_op_mov_reg_T0(ot, rm);
3521
            }
3522
            gen_op_update_neg_cc();
3523
            s->cc_op = CC_OP_SUBB + ot;
3524
            break;
3525
        case 4: /* mul */
3526
            switch(ot) {
3527
            case OT_BYTE:
3528
                gen_op_mulb_AL_T0();
3529
                s->cc_op = CC_OP_MULB;
3530
                break;
3531
            case OT_WORD:
3532
                gen_op_mulw_AX_T0();
3533
                s->cc_op = CC_OP_MULW;
3534
                break;
3535
            default:
3536
            case OT_LONG:
3537
                gen_op_mull_EAX_T0();
3538
                s->cc_op = CC_OP_MULL;
3539
                break;
3540
#ifdef TARGET_X86_64
3541
            case OT_QUAD:
3542
                gen_op_mulq_EAX_T0();
3543
                s->cc_op = CC_OP_MULQ;
3544
                break;
3545
#endif
3546
            }
3547
            break;
3548
        case 5: /* imul */
3549
            switch(ot) {
3550
            case OT_BYTE:
3551
                gen_op_imulb_AL_T0();
3552
                s->cc_op = CC_OP_MULB;
3553
                break;
3554
            case OT_WORD:
3555
                gen_op_imulw_AX_T0();
3556
                s->cc_op = CC_OP_MULW;
3557
                break;
3558
            default:
3559
            case OT_LONG:
3560
                gen_op_imull_EAX_T0();
3561
                s->cc_op = CC_OP_MULL;
3562
                break;
3563
#ifdef TARGET_X86_64
3564
            case OT_QUAD:
3565
                gen_op_imulq_EAX_T0();
3566
                s->cc_op = CC_OP_MULQ;
3567
                break;
3568
#endif
3569
            }
3570
            break;
3571
        case 6: /* div */
3572
            switch(ot) {
3573
            case OT_BYTE:
3574
                gen_jmp_im(pc_start - s->cs_base);
3575
                gen_op_divb_AL_T0();
3576
                break;
3577
            case OT_WORD:
3578
                gen_jmp_im(pc_start - s->cs_base);
3579
                gen_op_divw_AX_T0();
3580
                break;
3581
            default:
3582
            case OT_LONG:
3583
                gen_jmp_im(pc_start - s->cs_base);
3584
#ifdef MACRO_TEST
3585
                /* XXX: this is just a test */
3586
                tcg_gen_macro_2(cpu_T[0], cpu_T[0], MACRO_TEST);
3587
#else
3588
                tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
3589
#endif
3590
                break;
3591
#ifdef TARGET_X86_64
3592
            case OT_QUAD:
3593
                gen_jmp_im(pc_start - s->cs_base);
3594
                gen_op_divq_EAX_T0();
3595
                break;
3596
#endif
3597
            }
3598
            break;
3599
        case 7: /* idiv */
3600
            switch(ot) {
3601
            case OT_BYTE:
3602
                gen_jmp_im(pc_start - s->cs_base);
3603
                gen_op_idivb_AL_T0();
3604
                break;
3605
            case OT_WORD:
3606
                gen_jmp_im(pc_start - s->cs_base);
3607
                gen_op_idivw_AX_T0();
3608
                break;
3609
            default:
3610
            case OT_LONG:
3611
                gen_jmp_im(pc_start - s->cs_base);
3612
                tcg_gen_helper_0_1(helper_idivl_EAX_T0, cpu_T[0]);
3613
                break;
3614
#ifdef TARGET_X86_64
3615
            case OT_QUAD:
3616
                gen_jmp_im(pc_start - s->cs_base);
3617
                gen_op_idivq_EAX_T0();
3618
                break;
3619
#endif
3620
            }
3621
            break;
3622
        default:
3623
            goto illegal_op;
3624
        }
3625
        break;
3626

    
3627
    case 0xfe: /* GRP4 */
3628
    case 0xff: /* GRP5 */
3629
        if ((b & 1) == 0)
3630
            ot = OT_BYTE;
3631
        else
3632
            ot = dflag + OT_WORD;
3633

    
3634
        modrm = ldub_code(s->pc++);
3635
        mod = (modrm >> 6) & 3;
3636
        rm = (modrm & 7) | REX_B(s);
3637
        op = (modrm >> 3) & 7;
3638
        if (op >= 2 && b == 0xfe) {
3639
            goto illegal_op;
3640
        }
3641
        if (CODE64(s)) {
3642
            if (op == 2 || op == 4) {
3643
                /* operand size for jumps is 64 bit */
3644
                ot = OT_QUAD;
3645
            } else if (op == 3 || op == 5) {
3646
                /* for call calls, the operand is 16 or 32 bit, even
3647
                   in long mode */
3648
                ot = dflag ? OT_LONG : OT_WORD;
3649
            } else if (op == 6) {
3650
                /* default push size is 64 bit */
3651
                ot = dflag ? OT_QUAD : OT_WORD;
3652
            }
3653
        }
3654
        if (mod != 3) {
3655
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3656
            if (op >= 2 && op != 3 && op != 5)
3657
                gen_op_ld_T0_A0(ot + s->mem_index);
3658
        } else {
3659
            gen_op_mov_TN_reg(ot, 0, rm);
3660
        }
3661

    
3662
        switch(op) {
3663
        case 0: /* inc Ev */
3664
            if (mod != 3)
3665
                opreg = OR_TMP0;
3666
            else
3667
                opreg = rm;
3668
            gen_inc(s, ot, opreg, 1);
3669
            break;
3670
        case 1: /* dec Ev */
3671
            if (mod != 3)
3672
                opreg = OR_TMP0;
3673
            else
3674
                opreg = rm;
3675
            gen_inc(s, ot, opreg, -1);
3676
            break;
3677
        case 2: /* call Ev */
3678
            /* XXX: optimize if memory (no 'and' is necessary) */
3679
            if (s->dflag == 0)
3680
                gen_op_andl_T0_ffff();
3681
            next_eip = s->pc - s->cs_base;
3682
            gen_movtl_T1_im(next_eip);
3683
            gen_push_T1(s);
3684
            gen_op_jmp_T0();
3685
            gen_eob(s);
3686
            break;
3687
        case 3: /* lcall Ev */
3688
            gen_op_ld_T1_A0(ot + s->mem_index);
3689
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3690
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3691
        do_lcall:
3692
            if (s->pe && !s->vm86) {
3693
                if (s->cc_op != CC_OP_DYNAMIC)
3694
                    gen_op_set_cc_op(s->cc_op);
3695
                gen_jmp_im(pc_start - s->cs_base);
3696
                gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3697
            } else {
3698
                gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3699
            }
3700
            gen_eob(s);
3701
            break;
3702
        case 4: /* jmp Ev */
3703
            if (s->dflag == 0)
3704
                gen_op_andl_T0_ffff();
3705
            gen_op_jmp_T0();
3706
            gen_eob(s);
3707
            break;
3708
        case 5: /* ljmp Ev */
3709
            gen_op_ld_T1_A0(ot + s->mem_index);
3710
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3711
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
3712
        do_ljmp:
3713
            if (s->pe && !s->vm86) {
3714
                if (s->cc_op != CC_OP_DYNAMIC)
3715
                    gen_op_set_cc_op(s->cc_op);
3716
                gen_jmp_im(pc_start - s->cs_base);
3717
                gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3718
            } else {
3719
                gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3720
                gen_op_movl_T0_T1();
3721
                gen_op_jmp_T0();
3722
            }
3723
            gen_eob(s);
3724
            break;
3725
        case 6: /* push Ev */
3726
            gen_push_T0(s);
3727
            break;
3728
        default:
3729
            goto illegal_op;
3730
        }
3731
        break;
3732

    
3733
    case 0x84: /* test Ev, Gv */
3734
    case 0x85:
3735
        if ((b & 1) == 0)
3736
            ot = OT_BYTE;
3737
        else
3738
            ot = dflag + OT_WORD;
3739

    
3740
        modrm = ldub_code(s->pc++);
3741
        mod = (modrm >> 6) & 3;
3742
        rm = (modrm & 7) | REX_B(s);
3743
        reg = ((modrm >> 3) & 7) | rex_r;
3744

    
3745
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3746
        gen_op_mov_TN_reg(ot, 1, reg);
3747
        gen_op_testl_T0_T1_cc();
3748
        s->cc_op = CC_OP_LOGICB + ot;
3749
        break;
3750

    
3751
    case 0xa8: /* test eAX, Iv */
3752
    case 0xa9:
3753
        if ((b & 1) == 0)
3754
            ot = OT_BYTE;
3755
        else
3756
            ot = dflag + OT_WORD;
3757
        val = insn_get(s, ot);
3758

    
3759
        gen_op_mov_TN_reg(ot, 0, OR_EAX);
3760
        gen_op_movl_T1_im(val);
3761
        gen_op_testl_T0_T1_cc();
3762
        s->cc_op = CC_OP_LOGICB + ot;
3763
        break;
3764

    
3765
    case 0x98: /* CWDE/CBW */
3766
#ifdef TARGET_X86_64
3767
        if (dflag == 2) {
3768
            gen_op_movslq_RAX_EAX();
3769
        } else
3770
#endif
3771
        if (dflag == 1)
3772
            gen_op_movswl_EAX_AX();
3773
        else
3774
            gen_op_movsbw_AX_AL();
3775
        break;
3776
    case 0x99: /* CDQ/CWD */
3777
#ifdef TARGET_X86_64
3778
        if (dflag == 2) {
3779
            gen_op_movsqo_RDX_RAX();
3780
        } else
3781
#endif
3782
        if (dflag == 1)
3783
            gen_op_movslq_EDX_EAX();
3784
        else
3785
            gen_op_movswl_DX_AX();
3786
        break;
3787
    case 0x1af: /* imul Gv, Ev */
3788
    case 0x69: /* imul Gv, Ev, I */
3789
    case 0x6b:
3790
        ot = dflag + OT_WORD;
3791
        modrm = ldub_code(s->pc++);
3792
        reg = ((modrm >> 3) & 7) | rex_r;
3793
        if (b == 0x69)
3794
            s->rip_offset = insn_const_size(ot);
3795
        else if (b == 0x6b)
3796
            s->rip_offset = 1;
3797
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3798
        if (b == 0x69) {
3799
            val = insn_get(s, ot);
3800
            gen_op_movl_T1_im(val);
3801
        } else if (b == 0x6b) {
3802
            val = (int8_t)insn_get(s, OT_BYTE);
3803
            gen_op_movl_T1_im(val);
3804
        } else {
3805
            gen_op_mov_TN_reg(ot, 1, reg);
3806
        }
3807

    
3808
#ifdef TARGET_X86_64
3809
        if (ot == OT_QUAD) {
3810
            gen_op_imulq_T0_T1();
3811
        } else
3812
#endif
3813
        if (ot == OT_LONG) {
3814
            gen_op_imull_T0_T1();
3815
        } else {
3816
            gen_op_imulw_T0_T1();
3817
        }
3818
        gen_op_mov_reg_T0(ot, reg);
3819
        s->cc_op = CC_OP_MULB + ot;
3820
        break;
3821
    case 0x1c0:
3822
    case 0x1c1: /* xadd Ev, Gv */
3823
        if ((b & 1) == 0)
3824
            ot = OT_BYTE;
3825
        else
3826
            ot = dflag + OT_WORD;
3827
        modrm = ldub_code(s->pc++);
3828
        reg = ((modrm >> 3) & 7) | rex_r;
3829
        mod = (modrm >> 6) & 3;
3830
        if (mod == 3) {
3831
            rm = (modrm & 7) | REX_B(s);
3832
            gen_op_mov_TN_reg(ot, 0, reg);
3833
            gen_op_mov_TN_reg(ot, 1, rm);
3834
            gen_op_addl_T0_T1();
3835
            gen_op_mov_reg_T1(ot, reg);
3836
            gen_op_mov_reg_T0(ot, rm);
3837
        } else {
3838
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3839
            gen_op_mov_TN_reg(ot, 0, reg);
3840
            gen_op_ld_T1_A0(ot + s->mem_index);
3841
            gen_op_addl_T0_T1();
3842
            gen_op_st_T0_A0(ot + s->mem_index);
3843
            gen_op_mov_reg_T1(ot, reg);
3844
        }
3845
        gen_op_update2_cc();
3846
        s->cc_op = CC_OP_ADDB + ot;
3847
        break;
3848
    case 0x1b0:
3849
    case 0x1b1: /* cmpxchg Ev, Gv */
3850
        if ((b & 1) == 0)
3851
            ot = OT_BYTE;
3852
        else
3853
            ot = dflag + OT_WORD;
3854
        modrm = ldub_code(s->pc++);
3855
        reg = ((modrm >> 3) & 7) | rex_r;
3856
        mod = (modrm >> 6) & 3;
3857
        gen_op_mov_TN_reg(ot, 1, reg);
3858
        if (mod == 3) {
3859
            rm = (modrm & 7) | REX_B(s);
3860
            gen_op_mov_TN_reg(ot, 0, rm);
3861
            gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3862
            gen_op_mov_reg_T0(ot, rm);
3863
        } else {
3864
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3865
            gen_op_ld_T0_A0(ot + s->mem_index);
3866
            gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3867
        }
3868
        s->cc_op = CC_OP_SUBB + ot;
3869
        break;
3870
    case 0x1c7: /* cmpxchg8b */
3871
        modrm = ldub_code(s->pc++);
3872
        mod = (modrm >> 6) & 3;
3873
        if ((mod == 3) || ((modrm & 0x38) != 0x8))
3874
            goto illegal_op;
3875
        gen_jmp_im(pc_start - s->cs_base);
3876
        if (s->cc_op != CC_OP_DYNAMIC)
3877
            gen_op_set_cc_op(s->cc_op);
3878
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3879
        gen_op_cmpxchg8b();
3880
        s->cc_op = CC_OP_EFLAGS;
3881
        break;
3882

    
3883
        /**************************/
3884
        /* push/pop */
3885
    case 0x50 ... 0x57: /* push */
3886
        gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
3887
        gen_push_T0(s);
3888
        break;
3889
    case 0x58 ... 0x5f: /* pop */
3890
        if (CODE64(s)) {
3891
            ot = dflag ? OT_QUAD : OT_WORD;
3892
        } else {
3893
            ot = dflag + OT_WORD;
3894
        }
3895
        gen_pop_T0(s);
3896
        /* NOTE: order is important for pop %sp */
3897
        gen_pop_update(s);
3898
        gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
3899
        break;
3900
    case 0x60: /* pusha */
3901
        if (CODE64(s))
3902
            goto illegal_op;
3903
        gen_pusha(s);
3904
        break;
3905
    case 0x61: /* popa */
3906
        if (CODE64(s))
3907
            goto illegal_op;
3908
        gen_popa(s);
3909
        break;
3910
    case 0x68: /* push Iv */
3911
    case 0x6a:
3912
        if (CODE64(s)) {
3913
            ot = dflag ? OT_QUAD : OT_WORD;
3914
        } else {
3915
            ot = dflag + OT_WORD;
3916
        }
3917
        if (b == 0x68)
3918
            val = insn_get(s, ot);
3919
        else
3920
            val = (int8_t)insn_get(s, OT_BYTE);
3921
        gen_op_movl_T0_im(val);
3922
        gen_push_T0(s);
3923
        break;
3924
    case 0x8f: /* pop Ev */
3925
        if (CODE64(s)) {
3926
            ot = dflag ? OT_QUAD : OT_WORD;
3927
        } else {
3928
            ot = dflag + OT_WORD;
3929
        }
3930
        modrm = ldub_code(s->pc++);
3931
        mod = (modrm >> 6) & 3;
3932
        gen_pop_T0(s);
3933
        if (mod == 3) {
3934
            /* NOTE: order is important for pop %sp */
3935
            gen_pop_update(s);
3936
            rm = (modrm & 7) | REX_B(s);
3937
            gen_op_mov_reg_T0(ot, rm);
3938
        } else {
3939
            /* NOTE: order is important too for MMU exceptions */
3940
            s->popl_esp_hack = 1 << ot;
3941
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3942
            s->popl_esp_hack = 0;
3943
            gen_pop_update(s);
3944
        }
3945
        break;
3946
    case 0xc8: /* enter */
3947
        {
3948
            int level;
3949
            val = lduw_code(s->pc);
3950
            s->pc += 2;
3951
            level = ldub_code(s->pc++);
3952
            gen_enter(s, val, level);
3953
        }
3954
        break;
3955
    case 0xc9: /* leave */
3956
        /* XXX: exception not precise (ESP is updated before potential exception) */
3957
        if (CODE64(s)) {
3958
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
3959
            gen_op_mov_reg_T0(OT_QUAD, R_ESP);
3960
        } else if (s->ss32) {
3961
            gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3962
            gen_op_mov_reg_T0(OT_LONG, R_ESP);
3963
        } else {
3964
            gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
3965
            gen_op_mov_reg_T0(OT_WORD, R_ESP);
3966
        }
3967
        gen_pop_T0(s);
3968
        if (CODE64(s)) {
3969
            ot = dflag ? OT_QUAD : OT_WORD;
3970
        } else {
3971
            ot = dflag + OT_WORD;
3972
        }
3973
        gen_op_mov_reg_T0(ot, R_EBP);
3974
        gen_pop_update(s);
3975
        break;
3976
    case 0x06: /* push es */
3977
    case 0x0e: /* push cs */
3978
    case 0x16: /* push ss */
3979
    case 0x1e: /* push ds */
3980
        if (CODE64(s))
3981
            goto illegal_op;
3982
        gen_op_movl_T0_seg(b >> 3);
3983
        gen_push_T0(s);
3984
        break;
3985
    case 0x1a0: /* push fs */
3986
    case 0x1a8: /* push gs */
3987
        gen_op_movl_T0_seg((b >> 3) & 7);
3988
        gen_push_T0(s);
3989
        break;
3990
    case 0x07: /* pop es */
3991
    case 0x17: /* pop ss */
3992
    case 0x1f: /* pop ds */
3993
        if (CODE64(s))
3994
            goto illegal_op;
3995
        reg = b >> 3;
3996
        gen_pop_T0(s);
3997
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
3998
        gen_pop_update(s);
3999
        if (reg == R_SS) {
4000
            /* if reg == SS, inhibit interrupts/trace. */
4001
            /* If several instructions disable interrupts, only the
4002
               _first_ does it */
4003
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4004
                gen_op_set_inhibit_irq();
4005
            s->tf = 0;
4006
        }
4007
        if (s->is_jmp) {
4008
            gen_jmp_im(s->pc - s->cs_base);
4009
            gen_eob(s);
4010
        }
4011
        break;
4012
    case 0x1a1: /* pop fs */
4013
    case 0x1a9: /* pop gs */
4014
        gen_pop_T0(s);
4015
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4016
        gen_pop_update(s);
4017
        if (s->is_jmp) {
4018
            gen_jmp_im(s->pc - s->cs_base);
4019
            gen_eob(s);
4020
        }
4021
        break;
4022

    
4023
        /**************************/
4024
        /* mov */
4025
    case 0x88:
4026
    case 0x89: /* mov Gv, Ev */
4027
        if ((b & 1) == 0)
4028
            ot = OT_BYTE;
4029
        else
4030
            ot = dflag + OT_WORD;
4031
        modrm = ldub_code(s->pc++);
4032
        reg = ((modrm >> 3) & 7) | rex_r;
4033

    
4034
        /* generate a generic store */
4035
        gen_ldst_modrm(s, modrm, ot, reg, 1);
4036
        break;
4037
    case 0xc6:
4038
    case 0xc7: /* mov Ev, Iv */
4039
        if ((b & 1) == 0)
4040
            ot = OT_BYTE;
4041
        else
4042
            ot = dflag + OT_WORD;
4043
        modrm = ldub_code(s->pc++);
4044
        mod = (modrm >> 6) & 3;
4045
        if (mod != 3) {
4046
            s->rip_offset = insn_const_size(ot);
4047
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4048
        }
4049
        val = insn_get(s, ot);
4050
        gen_op_movl_T0_im(val);
4051
        if (mod != 3)
4052
            gen_op_st_T0_A0(ot + s->mem_index);
4053
        else
4054
            gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4055
        break;
4056
    case 0x8a:
4057
    case 0x8b: /* mov Ev, Gv */
4058
        if ((b & 1) == 0)
4059
            ot = OT_BYTE;
4060
        else
4061
            ot = OT_WORD + dflag;
4062
        modrm = ldub_code(s->pc++);
4063
        reg = ((modrm >> 3) & 7) | rex_r;
4064

    
4065
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4066
        gen_op_mov_reg_T0(ot, reg);
4067
        break;
4068
    case 0x8e: /* mov seg, Gv */
4069
        modrm = ldub_code(s->pc++);
4070
        reg = (modrm >> 3) & 7;
4071
        if (reg >= 6 || reg == R_CS)
4072
            goto illegal_op;
4073
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4074
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4075
        if (reg == R_SS) {
4076
            /* if reg == SS, inhibit interrupts/trace */
4077
            /* If several instructions disable interrupts, only the
4078
               _first_ does it */
4079
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4080
                gen_op_set_inhibit_irq();
4081
            s->tf = 0;
4082
        }
4083
        if (s->is_jmp) {
4084
            gen_jmp_im(s->pc - s->cs_base);
4085
            gen_eob(s);
4086
        }
4087
        break;
4088
    case 0x8c: /* mov Gv, seg */
4089
        modrm = ldub_code(s->pc++);
4090
        reg = (modrm >> 3) & 7;
4091
        mod = (modrm >> 6) & 3;
4092
        if (reg >= 6)
4093
            goto illegal_op;
4094
        gen_op_movl_T0_seg(reg);
4095
        if (mod == 3)
4096
            ot = OT_WORD + dflag;
4097
        else
4098
            ot = OT_WORD;
4099
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4100
        break;
4101

    
4102
    case 0x1b6: /* movzbS Gv, Eb */
4103
    case 0x1b7: /* movzwS Gv, Eb */
4104
    case 0x1be: /* movsbS Gv, Eb */
4105
    case 0x1bf: /* movswS Gv, Eb */
4106
        {
4107
            int d_ot;
4108
            /* d_ot is the size of destination */
4109
            d_ot = dflag + OT_WORD;
4110
            /* ot is the size of source */
4111
            ot = (b & 1) + OT_BYTE;
4112
            modrm = ldub_code(s->pc++);
4113
            reg = ((modrm >> 3) & 7) | rex_r;
4114
            mod = (modrm >> 6) & 3;
4115
            rm = (modrm & 7) | REX_B(s);
4116

    
4117
            if (mod == 3) {
4118
                gen_op_mov_TN_reg(ot, 0, rm);
4119
                switch(ot | (b & 8)) {
4120
                case OT_BYTE:
4121
                    gen_op_movzbl_T0_T0();
4122
                    break;
4123
                case OT_BYTE | 8:
4124
                    gen_op_movsbl_T0_T0();
4125
                    break;
4126
                case OT_WORD:
4127
                    gen_op_movzwl_T0_T0();
4128
                    break;
4129
                default:
4130
                case OT_WORD | 8:
4131
                    gen_op_movswl_T0_T0();
4132
                    break;
4133
                }
4134
                gen_op_mov_reg_T0(d_ot, reg);
4135
            } else {
4136
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4137
                if (b & 8) {
4138
                    gen_op_lds_T0_A0(ot + s->mem_index);
4139
                } else {
4140
                    gen_op_ldu_T0_A0(ot + s->mem_index);
4141
                }
4142
                gen_op_mov_reg_T0(d_ot, reg);
4143
            }
4144
        }
4145
        break;
4146

    
4147
    case 0x8d: /* lea */
4148
        ot = dflag + OT_WORD;
4149
        modrm = ldub_code(s->pc++);
4150
        mod = (modrm >> 6) & 3;
4151
        if (mod == 3)
4152
            goto illegal_op;
4153
        reg = ((modrm >> 3) & 7) | rex_r;
4154
        /* we must ensure that no segment is added */
4155
        s->override = -1;
4156
        val = s->addseg;
4157
        s->addseg = 0;
4158
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4159
        s->addseg = val;
4160
        gen_op_mov_reg_A0(ot - OT_WORD, reg);
4161
        break;
4162

    
4163
    case 0xa0: /* mov EAX, Ov */
4164
    case 0xa1:
4165
    case 0xa2: /* mov Ov, EAX */
4166
    case 0xa3:
4167
        {
4168
            target_ulong offset_addr;
4169

    
4170
            if ((b & 1) == 0)
4171
                ot = OT_BYTE;
4172
            else
4173
                ot = dflag + OT_WORD;
4174
#ifdef TARGET_X86_64
4175
            if (s->aflag == 2) {
4176
                offset_addr = ldq_code(s->pc);
4177
                s->pc += 8;
4178
                gen_op_movq_A0_im(offset_addr);
4179
            } else
4180
#endif
4181
            {
4182
                if (s->aflag) {
4183
                    offset_addr = insn_get(s, OT_LONG);
4184
                } else {
4185
                    offset_addr = insn_get(s, OT_WORD);
4186
                }
4187
                gen_op_movl_A0_im(offset_addr);
4188
            }
4189
            gen_add_A0_ds_seg(s);
4190
            if ((b & 2) == 0) {
4191
                gen_op_ld_T0_A0(ot + s->mem_index);
4192
                gen_op_mov_reg_T0(ot, R_EAX);
4193
            } else {
4194
                gen_op_mov_TN_reg(ot, 0, R_EAX);
4195
                gen_op_st_T0_A0(ot + s->mem_index);
4196
            }
4197
        }
4198
        break;
4199
    case 0xd7: /* xlat */
4200
#ifdef TARGET_X86_64
4201
        if (s->aflag == 2) {
4202
            gen_op_movq_A0_reg(R_EBX);
4203
            gen_op_addq_A0_AL();
4204
        } else
4205
#endif
4206
        {
4207
            gen_op_movl_A0_reg(R_EBX);
4208
            gen_op_addl_A0_AL();
4209
            if (s->aflag == 0)
4210
                gen_op_andl_A0_ffff();
4211
        }
4212
        gen_add_A0_ds_seg(s);
4213
        gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4214
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4215
        break;
4216
    case 0xb0 ... 0xb7: /* mov R, Ib */
4217
        val = insn_get(s, OT_BYTE);
4218
        gen_op_movl_T0_im(val);
4219
        gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4220
        break;
4221
    case 0xb8 ... 0xbf: /* mov R, Iv */
4222
#ifdef TARGET_X86_64
4223
        if (dflag == 2) {
4224
            uint64_t tmp;
4225
            /* 64 bit case */
4226
            tmp = ldq_code(s->pc);
4227
            s->pc += 8;
4228
            reg = (b & 7) | REX_B(s);
4229
            gen_movtl_T0_im(tmp);
4230
            gen_op_mov_reg_T0(OT_QUAD, reg);
4231
        } else
4232
#endif
4233
        {
4234
            ot = dflag ? OT_LONG : OT_WORD;
4235
            val = insn_get(s, ot);
4236
            reg = (b & 7) | REX_B(s);
4237
            gen_op_movl_T0_im(val);
4238
            gen_op_mov_reg_T0(ot, reg);
4239
        }
4240
        break;
4241

    
4242
    case 0x91 ... 0x97: /* xchg R, EAX */
4243
        ot = dflag + OT_WORD;
4244
        reg = (b & 7) | REX_B(s);
4245
        rm = R_EAX;
4246
        goto do_xchg_reg;
4247
    case 0x86:
4248
    case 0x87: /* xchg Ev, Gv */
4249
        if ((b & 1) == 0)
4250
            ot = OT_BYTE;
4251
        else
4252
            ot = dflag + OT_WORD;
4253
        modrm = ldub_code(s->pc++);
4254
        reg = ((modrm >> 3) & 7) | rex_r;
4255
        mod = (modrm >> 6) & 3;
4256
        if (mod == 3) {
4257
            rm = (modrm & 7) | REX_B(s);
4258
        do_xchg_reg:
4259
            gen_op_mov_TN_reg(ot, 0, reg);
4260
            gen_op_mov_TN_reg(ot, 1, rm);
4261
            gen_op_mov_reg_T0(ot, rm);
4262
            gen_op_mov_reg_T1(ot, reg);
4263
        } else {
4264
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4265
            gen_op_mov_TN_reg(ot, 0, reg);
4266
            /* for xchg, lock is implicit */
4267
            if (!(prefixes & PREFIX_LOCK))
4268
                gen_op_lock();
4269
            gen_op_ld_T1_A0(ot + s->mem_index);
4270
            gen_op_st_T0_A0(ot + s->mem_index);
4271
            if (!(prefixes & PREFIX_LOCK))
4272
                gen_op_unlock();
4273
            gen_op_mov_reg_T1(ot, reg);
4274
        }
4275
        break;
4276
    case 0xc4: /* les Gv */
4277
        if (CODE64(s))
4278
            goto illegal_op;
4279
        op = R_ES;
4280
        goto do_lxx;
4281
    case 0xc5: /* lds Gv */
4282
        if (CODE64(s))
4283
            goto illegal_op;
4284
        op = R_DS;
4285
        goto do_lxx;
4286
    case 0x1b2: /* lss Gv */
4287
        op = R_SS;
4288
        goto do_lxx;
4289
    case 0x1b4: /* lfs Gv */
4290
        op = R_FS;
4291
        goto do_lxx;
4292
    case 0x1b5: /* lgs Gv */
4293
        op = R_GS;
4294
    do_lxx:
4295
        ot = dflag ? OT_LONG : OT_WORD;
4296
        modrm = ldub_code(s->pc++);
4297
        reg = ((modrm >> 3) & 7) | rex_r;
4298
        mod = (modrm >> 6) & 3;
4299
        if (mod == 3)
4300
            goto illegal_op;
4301
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4302
        gen_op_ld_T1_A0(ot + s->mem_index);
4303
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4304
        /* load the segment first to handle exceptions properly */
4305
        gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4306
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4307
        /* then put the data */
4308
        gen_op_mov_reg_T1(ot, reg);
4309
        if (s->is_jmp) {
4310
            gen_jmp_im(s->pc - s->cs_base);
4311
            gen_eob(s);
4312
        }
4313
        break;
4314

    
4315
        /************************/
4316
        /* shifts */
4317
    case 0xc0:
4318
    case 0xc1:
4319
        /* shift Ev,Ib */
4320
        shift = 2;
4321
    grp2:
4322
        {
4323
            if ((b & 1) == 0)
4324
                ot = OT_BYTE;
4325
            else
4326
                ot = dflag + OT_WORD;
4327

    
4328
            modrm = ldub_code(s->pc++);
4329
            mod = (modrm >> 6) & 3;
4330
            op = (modrm >> 3) & 7;
4331

    
4332
            if (mod != 3) {
4333
                if (shift == 2) {
4334
                    s->rip_offset = 1;
4335
                }
4336
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4337
                opreg = OR_TMP0;
4338
            } else {
4339
                opreg = (modrm & 7) | REX_B(s);
4340
            }
4341

    
4342
            /* simpler op */
4343
            if (shift == 0) {
4344
                gen_shift(s, op, ot, opreg, OR_ECX);
4345
            } else {
4346
                if (shift == 2) {
4347
                    shift = ldub_code(s->pc++);
4348
                }
4349
                gen_shifti(s, op, ot, opreg, shift);
4350
            }
4351
        }
4352
        break;
4353
    case 0xd0:
4354
    case 0xd1:
4355
        /* shift Ev,1 */
4356
        shift = 1;
4357
        goto grp2;
4358
    case 0xd2:
4359
    case 0xd3:
4360
        /* shift Ev,cl */
4361
        shift = 0;
4362
        goto grp2;
4363

    
4364
    case 0x1a4: /* shld imm */
4365
        op = 0;
4366
        shift = 1;
4367
        goto do_shiftd;
4368
    case 0x1a5: /* shld cl */
4369
        op = 0;
4370
        shift = 0;
4371
        goto do_shiftd;
4372
    case 0x1ac: /* shrd imm */
4373
        op = 1;
4374
        shift = 1;
4375
        goto do_shiftd;
4376
    case 0x1ad: /* shrd cl */
4377
        op = 1;
4378
        shift = 0;
4379
    do_shiftd:
4380
        ot = dflag + OT_WORD;
4381
        modrm = ldub_code(s->pc++);
4382
        mod = (modrm >> 6) & 3;
4383
        rm = (modrm & 7) | REX_B(s);
4384
        reg = ((modrm >> 3) & 7) | rex_r;
4385

    
4386
        if (mod != 3) {
4387
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4388
            gen_op_ld_T0_A0(ot + s->mem_index);
4389
        } else {
4390
            gen_op_mov_TN_reg(ot, 0, rm);
4391
        }
4392
        gen_op_mov_TN_reg(ot, 1, reg);
4393

    
4394
        if (shift) {
4395
            val = ldub_code(s->pc++);
4396
            if (ot == OT_QUAD)
4397
                val &= 0x3f;
4398
            else
4399
                val &= 0x1f;
4400
            if (val) {
4401
                if (mod == 3)
4402
                    gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4403
                else
4404
                    gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4405
                if (op == 0 && ot != OT_WORD)
4406
                    s->cc_op = CC_OP_SHLB + ot;
4407
                else
4408
                    s->cc_op = CC_OP_SARB + ot;
4409
            }
4410
        } else {
4411
            if (s->cc_op != CC_OP_DYNAMIC)
4412
                gen_op_set_cc_op(s->cc_op);
4413
            if (mod == 3)
4414
                gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4415
            else
4416
                gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4417
            s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4418
        }
4419
        if (mod == 3) {
4420
            gen_op_mov_reg_T0(ot, rm);
4421
        }
4422
        break;
4423

    
4424
        /************************/
4425
        /* floats */
4426
    case 0xd8 ... 0xdf:
4427
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4428
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4429
            /* XXX: what to do if illegal op ? */
4430
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4431
            break;
4432
        }
4433
        modrm = ldub_code(s->pc++);
4434
        mod = (modrm >> 6) & 3;
4435
        rm = modrm & 7;
4436
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4437
        if (mod != 3) {
4438
            /* memory op */
4439
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4440
            switch(op) {
4441
            case 0x00 ... 0x07: /* fxxxs */
4442
            case 0x10 ... 0x17: /* fixxxl */
4443
            case 0x20 ... 0x27: /* fxxxl */
4444
            case 0x30 ... 0x37: /* fixxx */
4445
                {
4446
                    int op1;
4447
                    op1 = op & 7;
4448

    
4449
                    switch(op >> 4) {
4450
                    case 0:
4451
                        gen_op_flds_FT0_A0();
4452
                        break;
4453
                    case 1:
4454
                        gen_op_fildl_FT0_A0();
4455
                        break;
4456
                    case 2:
4457
                        gen_op_fldl_FT0_A0();
4458
                        break;
4459
                    case 3:
4460
                    default:
4461
                        gen_op_fild_FT0_A0();
4462
                        break;
4463
                    }
4464

    
4465
                    gen_op_fp_arith_ST0_FT0[op1]();
4466
                    if (op1 == 3) {
4467
                        /* fcomp needs pop */
4468
                        gen_op_fpop();
4469
                    }
4470
                }
4471
                break;
4472
            case 0x08: /* flds */
4473
            case 0x0a: /* fsts */
4474
            case 0x0b: /* fstps */
4475
            case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4476
            case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4477
            case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4478
                switch(op & 7) {
4479
                case 0:
4480
                    switch(op >> 4) {
4481
                    case 0:
4482
                        gen_op_flds_ST0_A0();
4483
                        break;
4484
                    case 1:
4485
                        gen_op_fildl_ST0_A0();
4486
                        break;
4487
                    case 2:
4488
                        gen_op_fldl_ST0_A0();
4489
                        break;
4490
                    case 3:
4491
                    default:
4492
                        gen_op_fild_ST0_A0();
4493
                        break;
4494
                    }
4495
                    break;
4496
                case 1:
4497
                    switch(op >> 4) {
4498
                    case 1:
4499
                        gen_op_fisttl_ST0_A0();
4500
                        break;
4501
                    case 2:
4502
                        gen_op_fisttll_ST0_A0();
4503
                        break;
4504
                    case 3:
4505
                    default:
4506
                        gen_op_fistt_ST0_A0();
4507
                    }
4508
                    gen_op_fpop();
4509
                    break;
4510
                default:
4511
                    switch(op >> 4) {
4512
                    case 0:
4513
                        gen_op_fsts_ST0_A0();
4514
                        break;
4515
                    case 1:
4516
                        gen_op_fistl_ST0_A0();
4517
                        break;
4518
                    case 2:
4519
                        gen_op_fstl_ST0_A0();
4520
                        break;
4521
                    case 3:
4522
                    default:
4523
                        gen_op_fist_ST0_A0();
4524
                        break;
4525
                    }
4526
                    if ((op & 7) == 3)
4527
                        gen_op_fpop();
4528
                    break;
4529
                }
4530
                break;
4531
            case 0x0c: /* fldenv mem */
4532
                gen_op_fldenv_A0(s->dflag);
4533
                break;
4534
            case 0x0d: /* fldcw mem */
4535
                gen_op_fldcw_A0();
4536
                break;
4537
            case 0x0e: /* fnstenv mem */
4538
                gen_op_fnstenv_A0(s->dflag);
4539
                break;
4540
            case 0x0f: /* fnstcw mem */
4541
                gen_op_fnstcw_A0();
4542
                break;
4543
            case 0x1d: /* fldt mem */
4544
                gen_op_fldt_ST0_A0();
4545
                break;
4546
            case 0x1f: /* fstpt mem */
4547
                gen_op_fstt_ST0_A0();
4548
                gen_op_fpop();
4549
                break;
4550
            case 0x2c: /* frstor mem */
4551
                gen_op_frstor_A0(s->dflag);
4552
                break;
4553
            case 0x2e: /* fnsave mem */
4554
                gen_op_fnsave_A0(s->dflag);
4555
                break;
4556
            case 0x2f: /* fnstsw mem */
4557
                gen_op_fnstsw_A0();
4558
                break;
4559
            case 0x3c: /* fbld */
4560
                gen_op_fbld_ST0_A0();
4561
                break;
4562
            case 0x3e: /* fbstp */
4563
                gen_op_fbst_ST0_A0();
4564
                gen_op_fpop();
4565
                break;
4566
            case 0x3d: /* fildll */
4567
                gen_op_fildll_ST0_A0();
4568
                break;
4569
            case 0x3f: /* fistpll */
4570
                gen_op_fistll_ST0_A0();
4571
                gen_op_fpop();
4572
                break;
4573
            default:
4574
                goto illegal_op;
4575
            }
4576
        } else {
4577
            /* register float ops */
4578
            opreg = rm;
4579

    
4580
            switch(op) {
4581
            case 0x08: /* fld sti */
4582
                gen_op_fpush();
4583
                gen_op_fmov_ST0_STN((opreg + 1) & 7);
4584
                break;
4585
            case 0x09: /* fxchg sti */
4586
            case 0x29: /* fxchg4 sti, undocumented op */
4587
            case 0x39: /* fxchg7 sti, undocumented op */
4588
                gen_op_fxchg_ST0_STN(opreg);
4589
                break;
4590
            case 0x0a: /* grp d9/2 */
4591
                switch(rm) {
4592
                case 0: /* fnop */
4593
                    /* check exceptions (FreeBSD FPU probe) */
4594
                    if (s->cc_op != CC_OP_DYNAMIC)
4595
                        gen_op_set_cc_op(s->cc_op);
4596
                    gen_jmp_im(pc_start - s->cs_base);
4597
                    gen_op_fwait();
4598
                    break;
4599
                default:
4600
                    goto illegal_op;
4601
                }
4602
                break;
4603
            case 0x0c: /* grp d9/4 */
4604
                switch(rm) {
4605
                case 0: /* fchs */
4606
                    gen_op_fchs_ST0();
4607
                    break;
4608
                case 1: /* fabs */
4609
                    gen_op_fabs_ST0();
4610
                    break;
4611
                case 4: /* ftst */
4612
                    gen_op_fldz_FT0();
4613
                    gen_op_fcom_ST0_FT0();
4614
                    break;
4615
                case 5: /* fxam */
4616
                    gen_op_fxam_ST0();
4617
                    break;
4618
                default:
4619
                    goto illegal_op;
4620
                }
4621
                break;
4622
            case 0x0d: /* grp d9/5 */
4623
                {
4624
                    switch(rm) {
4625
                    case 0:
4626
                        gen_op_fpush();
4627
                        gen_op_fld1_ST0();
4628
                        break;
4629
                    case 1:
4630
                        gen_op_fpush();
4631
                        gen_op_fldl2t_ST0();
4632
                        break;
4633
                    case 2:
4634
                        gen_op_fpush();
4635
                        gen_op_fldl2e_ST0();
4636
                        break;
4637
                    case 3:
4638
                        gen_op_fpush();
4639
                        gen_op_fldpi_ST0();
4640
                        break;
4641
                    case 4:
4642
                        gen_op_fpush();
4643
                        gen_op_fldlg2_ST0();
4644
                        break;
4645
                    case 5:
4646
                        gen_op_fpush();
4647
                        gen_op_fldln2_ST0();
4648
                        break;
4649
                    case 6:
4650
                        gen_op_fpush();
4651
                        gen_op_fldz_ST0();
4652
                        break;
4653
                    default:
4654
                        goto illegal_op;
4655
                    }
4656
                }
4657
                break;
4658
            case 0x0e: /* grp d9/6 */
4659
                switch(rm) {
4660
                case 0: /* f2xm1 */
4661
                    gen_op_f2xm1();
4662
                    break;
4663
                case 1: /* fyl2x */
4664
                    gen_op_fyl2x();
4665
                    break;
4666
                case 2: /* fptan */
4667
                    gen_op_fptan();
4668
                    break;
4669
                case 3: /* fpatan */
4670
                    gen_op_fpatan();
4671
                    break;
4672
                case 4: /* fxtract */
4673
                    gen_op_fxtract();
4674
                    break;
4675
                case 5: /* fprem1 */
4676
                    gen_op_fprem1();
4677
                    break;
4678
                case 6: /* fdecstp */
4679
                    gen_op_fdecstp();
4680
                    break;
4681
                default:
4682
                case 7: /* fincstp */
4683
                    gen_op_fincstp();
4684
                    break;
4685
                }
4686
                break;
4687
            case 0x0f: /* grp d9/7 */
4688
                switch(rm) {
4689
                case 0: /* fprem */
4690
                    gen_op_fprem();
4691
                    break;
4692
                case 1: /* fyl2xp1 */
4693
                    gen_op_fyl2xp1();
4694
                    break;
4695
                case 2: /* fsqrt */
4696
                    gen_op_fsqrt();
4697
                    break;
4698
                case 3: /* fsincos */
4699
                    gen_op_fsincos();
4700
                    break;
4701
                case 5: /* fscale */
4702
                    gen_op_fscale();
4703
                    break;
4704
                case 4: /* frndint */
4705
                    gen_op_frndint();
4706
                    break;
4707
                case 6: /* fsin */
4708
                    gen_op_fsin();
4709
                    break;
4710
                default:
4711
                case 7: /* fcos */
4712
                    gen_op_fcos();
4713
                    break;
4714
                }
4715
                break;
4716
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4717
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4718
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4719
                {
4720
                    int op1;
4721

    
4722
                    op1 = op & 7;
4723
                    if (op >= 0x20) {
4724
                        gen_op_fp_arith_STN_ST0[op1](opreg);
4725
                        if (op >= 0x30)
4726
                            gen_op_fpop();
4727
                    } else {
4728
                        gen_op_fmov_FT0_STN(opreg);
4729
                        gen_op_fp_arith_ST0_FT0[op1]();
4730
                    }
4731
                }
4732
                break;
4733
            case 0x02: /* fcom */
4734
            case 0x22: /* fcom2, undocumented op */
4735
                gen_op_fmov_FT0_STN(opreg);
4736
                gen_op_fcom_ST0_FT0();
4737
                break;
4738
            case 0x03: /* fcomp */
4739
            case 0x23: /* fcomp3, undocumented op */
4740
            case 0x32: /* fcomp5, undocumented op */
4741
                gen_op_fmov_FT0_STN(opreg);
4742
                gen_op_fcom_ST0_FT0();
4743
                gen_op_fpop();
4744
                break;
4745
            case 0x15: /* da/5 */
4746
                switch(rm) {
4747
                case 1: /* fucompp */
4748
                    gen_op_fmov_FT0_STN(1);
4749
                    gen_op_fucom_ST0_FT0();
4750
                    gen_op_fpop();
4751
                    gen_op_fpop();
4752
                    break;
4753
                default:
4754
                    goto illegal_op;
4755
                }
4756
                break;
4757
            case 0x1c:
4758
                switch(rm) {
4759
                case 0: /* feni (287 only, just do nop here) */
4760
                    break;
4761
                case 1: /* fdisi (287 only, just do nop here) */
4762
                    break;
4763
                case 2: /* fclex */
4764
                    gen_op_fclex();
4765
                    break;
4766
                case 3: /* fninit */
4767
                    gen_op_fninit();
4768
                    break;
4769
                case 4: /* fsetpm (287 only, just do nop here) */
4770
                    break;
4771
                default:
4772
                    goto illegal_op;
4773
                }
4774
                break;
4775
            case 0x1d: /* fucomi */
4776
                if (s->cc_op != CC_OP_DYNAMIC)
4777
                    gen_op_set_cc_op(s->cc_op);
4778
                gen_op_fmov_FT0_STN(opreg);
4779
                gen_op_fucomi_ST0_FT0();
4780
                s->cc_op = CC_OP_EFLAGS;
4781
                break;
4782
            case 0x1e: /* fcomi */
4783
                if (s->cc_op != CC_OP_DYNAMIC)
4784
                    gen_op_set_cc_op(s->cc_op);
4785
                gen_op_fmov_FT0_STN(opreg);
4786
                gen_op_fcomi_ST0_FT0();
4787
                s->cc_op = CC_OP_EFLAGS;
4788
                break;
4789
            case 0x28: /* ffree sti */
4790
                gen_op_ffree_STN(opreg);
4791
                break;
4792
            case 0x2a: /* fst sti */
4793
                gen_op_fmov_STN_ST0(opreg);
4794
                break;
4795
            case 0x2b: /* fstp sti */
4796
            case 0x0b: /* fstp1 sti, undocumented op */
4797
            case 0x3a: /* fstp8 sti, undocumented op */
4798
            case 0x3b: /* fstp9 sti, undocumented op */
4799
                gen_op_fmov_STN_ST0(opreg);
4800
                gen_op_fpop();
4801
                break;
4802
            case 0x2c: /* fucom st(i) */
4803
                gen_op_fmov_FT0_STN(opreg);
4804
                gen_op_fucom_ST0_FT0();
4805
                break;
4806
            case 0x2d: /* fucomp st(i) */
4807
                gen_op_fmov_FT0_STN(opreg);
4808
                gen_op_fucom_ST0_FT0();
4809
                gen_op_fpop();
4810
                break;
4811
            case 0x33: /* de/3 */
4812
                switch(rm) {
4813
                case 1: /* fcompp */
4814
                    gen_op_fmov_FT0_STN(1);
4815
                    gen_op_fcom_ST0_FT0();
4816
                    gen_op_fpop();
4817
                    gen_op_fpop();
4818
                    break;
4819
                default:
4820
                    goto illegal_op;
4821
                }
4822
                break;
4823
            case 0x38: /* ffreep sti, undocumented op */
4824
                gen_op_ffree_STN(opreg);
4825
                gen_op_fpop();
4826
                break;
4827
            case 0x3c: /* df/4 */
4828
                switch(rm) {
4829
                case 0:
4830
                    gen_op_fnstsw_EAX();
4831
                    break;
4832
                default:
4833
                    goto illegal_op;
4834
                }
4835
                break;
4836
            case 0x3d: /* fucomip */
4837
                if (s->cc_op != CC_OP_DYNAMIC)
4838
                    gen_op_set_cc_op(s->cc_op);
4839
                gen_op_fmov_FT0_STN(opreg);
4840
                gen_op_fucomi_ST0_FT0();
4841
                gen_op_fpop();
4842
                s->cc_op = CC_OP_EFLAGS;
4843
                break;
4844
            case 0x3e: /* fcomip */
4845
                if (s->cc_op != CC_OP_DYNAMIC)
4846
                    gen_op_set_cc_op(s->cc_op);
4847
                gen_op_fmov_FT0_STN(opreg);
4848
                gen_op_fcomi_ST0_FT0();
4849
                gen_op_fpop();
4850
                s->cc_op = CC_OP_EFLAGS;
4851
                break;
4852
            case 0x10 ... 0x13: /* fcmovxx */
4853
            case 0x18 ... 0x1b:
4854
                {
4855
                    int op1;
4856
                    const static uint8_t fcmov_cc[8] = {
4857
                        (JCC_B << 1),
4858
                        (JCC_Z << 1),
4859
                        (JCC_BE << 1),
4860
                        (JCC_P << 1),
4861
                    };
4862
                    op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
4863
                    gen_setcc(s, op1);
4864
                    gen_op_fcmov_ST0_STN_T0(opreg);
4865
                }
4866
                break;
4867
            default:
4868
                goto illegal_op;
4869
            }
4870
        }
4871
        break;
4872
        /************************/
4873
        /* string ops */
4874

    
4875
    case 0xa4: /* movsS */
4876
    case 0xa5:
4877
        if ((b & 1) == 0)
4878
            ot = OT_BYTE;
4879
        else
4880
            ot = dflag + OT_WORD;
4881

    
4882
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4883
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4884
        } else {
4885
            gen_movs(s, ot);
4886
        }
4887
        break;
4888

    
4889
    case 0xaa: /* stosS */
4890
    case 0xab:
4891
        if ((b & 1) == 0)
4892
            ot = OT_BYTE;
4893
        else
4894
            ot = dflag + OT_WORD;
4895

    
4896
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4897
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4898
        } else {
4899
            gen_stos(s, ot);
4900
        }
4901
        break;
4902
    case 0xac: /* lodsS */
4903
    case 0xad:
4904
        if ((b & 1) == 0)
4905
            ot = OT_BYTE;
4906
        else
4907
            ot = dflag + OT_WORD;
4908
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4909
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4910
        } else {
4911
            gen_lods(s, ot);
4912
        }
4913
        break;
4914
    case 0xae: /* scasS */
4915
    case 0xaf:
4916
        if ((b & 1) == 0)
4917
            ot = OT_BYTE;
4918
        else
4919
            ot = dflag + OT_WORD;
4920
        if (prefixes & PREFIX_REPNZ) {
4921
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4922
        } else if (prefixes & PREFIX_REPZ) {
4923
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4924
        } else {
4925
            gen_scas(s, ot);
4926
            s->cc_op = CC_OP_SUBB + ot;
4927
        }
4928
        break;
4929

    
4930
    case 0xa6: /* cmpsS */
4931
    case 0xa7:
4932
        if ((b & 1) == 0)
4933
            ot = OT_BYTE;
4934
        else
4935
            ot = dflag + OT_WORD;
4936
        if (prefixes & PREFIX_REPNZ) {
4937
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4938
        } else if (prefixes & PREFIX_REPZ) {
4939
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4940
        } else {
4941
            gen_cmps(s, ot);
4942
            s->cc_op = CC_OP_SUBB + ot;
4943
        }
4944
        break;
4945
    case 0x6c: /* insS */
4946
    case 0x6d:
4947
        if ((b & 1) == 0)
4948
            ot = OT_BYTE;
4949
        else
4950
            ot = dflag ? OT_LONG : OT_WORD;
4951
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
4952
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
4953
        gen_op_andl_T0_ffff();
4954
        if (gen_svm_check_io(s, pc_start,
4955
                             SVM_IOIO_TYPE_MASK | (1 << (4+ot)) |
4956
                             svm_is_rep(prefixes) | 4 | (1 << (7+s->aflag))))
4957
            break;
4958
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4959
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4960
        } else {
4961
            gen_ins(s, ot);
4962
        }
4963
        break;
4964
    case 0x6e: /* outsS */
4965
    case 0x6f:
4966
        if ((b & 1) == 0)
4967
            ot = OT_BYTE;
4968
        else
4969
            ot = dflag ? OT_LONG : OT_WORD;
4970
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
4971
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
4972
        gen_op_andl_T0_ffff();
4973
        if (gen_svm_check_io(s, pc_start,
4974
                             (1 << (4+ot)) | svm_is_rep(prefixes) |
4975
                             4 | (1 << (7+s->aflag))))
4976
            break;
4977
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4978
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4979
        } else {
4980
            gen_outs(s, ot);
4981
        }
4982
        break;
4983

    
4984
        /************************/
4985
        /* port I/O */
4986

    
4987
    case 0xe4:
4988
    case 0xe5:
4989
        if ((b & 1) == 0)
4990
            ot = OT_BYTE;
4991
        else
4992
            ot = dflag ? OT_LONG : OT_WORD;
4993
        val = ldub_code(s->pc++);
4994
        gen_op_movl_T0_im(val);
4995
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
4996
        if (gen_svm_check_io(s, pc_start,
4997
                             SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
4998
                             (1 << (4+ot))))
4999
            break;
5000
        gen_op_in[ot]();
5001
        gen_op_mov_reg_T1(ot, R_EAX);
5002
        break;
5003
    case 0xe6:
5004
    case 0xe7:
5005
        if ((b & 1) == 0)
5006
            ot = OT_BYTE;
5007
        else
5008
            ot = dflag ? OT_LONG : OT_WORD;
5009
        val = ldub_code(s->pc++);
5010
        gen_op_movl_T0_im(val);
5011
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
5012
        if (gen_svm_check_io(s, pc_start, svm_is_rep(prefixes) |
5013
                             (1 << (4+ot))))
5014
            break;
5015
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5016
        gen_op_out[ot]();
5017
        break;
5018
    case 0xec:
5019
    case 0xed:
5020
        if ((b & 1) == 0)
5021
            ot = OT_BYTE;
5022
        else
5023
            ot = dflag ? OT_LONG : OT_WORD;
5024
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5025
        gen_op_andl_T0_ffff();
5026
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
5027
        if (gen_svm_check_io(s, pc_start,
5028
                             SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) |
5029
                             (1 << (4+ot))))
5030
            break;
5031
        gen_op_in[ot]();
5032
        gen_op_mov_reg_T1(ot, R_EAX);
5033
        break;
5034
    case 0xee:
5035
    case 0xef:
5036
        if ((b & 1) == 0)
5037
            ot = OT_BYTE;
5038
        else
5039
            ot = dflag ? OT_LONG : OT_WORD;
5040
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5041
        gen_op_andl_T0_ffff();
5042
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
5043
        if (gen_svm_check_io(s, pc_start,
5044
                             svm_is_rep(prefixes) | (1 << (4+ot))))
5045
            break;
5046
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5047
        gen_op_out[ot]();
5048
        break;
5049

    
5050
        /************************/
5051
        /* control */
5052
    case 0xc2: /* ret im */
5053
        val = ldsw_code(s->pc);
5054
        s->pc += 2;
5055
        gen_pop_T0(s);
5056
        if (CODE64(s) && s->dflag)
5057
            s->dflag = 2;
5058
        gen_stack_update(s, val + (2 << s->dflag));
5059
        if (s->dflag == 0)
5060
            gen_op_andl_T0_ffff();
5061
        gen_op_jmp_T0();
5062
        gen_eob(s);
5063
        break;
5064
    case 0xc3: /* ret */
5065
        gen_pop_T0(s);
5066
        gen_pop_update(s);
5067
        if (s->dflag == 0)
5068
            gen_op_andl_T0_ffff();
5069
        gen_op_jmp_T0();
5070
        gen_eob(s);
5071
        break;
5072
    case 0xca: /* lret im */
5073
        val = ldsw_code(s->pc);
5074
        s->pc += 2;
5075
    do_lret:
5076
        if (s->pe && !s->vm86) {
5077
            if (s->cc_op != CC_OP_DYNAMIC)
5078
                gen_op_set_cc_op(s->cc_op);
5079
            gen_jmp_im(pc_start - s->cs_base);
5080
            gen_op_lret_protected(s->dflag, val);
5081
        } else {
5082
            gen_stack_A0(s);
5083
            /* pop offset */
5084
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5085
            if (s->dflag == 0)
5086
                gen_op_andl_T0_ffff();
5087
            /* NOTE: keeping EIP updated is not a problem in case of
5088
               exception */
5089
            gen_op_jmp_T0();
5090
            /* pop selector */
5091
            gen_op_addl_A0_im(2 << s->dflag);
5092
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5093
            gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5094
            /* add stack offset */
5095
            gen_stack_update(s, val + (4 << s->dflag));
5096
        }
5097
        gen_eob(s);
5098
        break;
5099
    case 0xcb: /* lret */
5100
        val = 0;
5101
        goto do_lret;
5102
    case 0xcf: /* iret */
5103
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5104
            break;
5105
        if (!s->pe) {
5106
            /* real mode */
5107
            gen_op_iret_real(s->dflag);
5108
            s->cc_op = CC_OP_EFLAGS;
5109
        } else if (s->vm86) {
5110
            if (s->iopl != 3) {
5111
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5112
            } else {
5113
                gen_op_iret_real(s->dflag);
5114
                s->cc_op = CC_OP_EFLAGS;
5115
            }
5116
        } else {
5117
            if (s->cc_op != CC_OP_DYNAMIC)
5118
                gen_op_set_cc_op(s->cc_op);
5119
            gen_jmp_im(pc_start - s->cs_base);
5120
            gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
5121
            s->cc_op = CC_OP_EFLAGS;
5122
        }
5123
        gen_eob(s);
5124
        break;
5125
    case 0xe8: /* call im */
5126
        {
5127
            if (dflag)
5128
                tval = (int32_t)insn_get(s, OT_LONG);
5129
            else
5130
                tval = (int16_t)insn_get(s, OT_WORD);
5131
            next_eip = s->pc - s->cs_base;
5132
            tval += next_eip;
5133
            if (s->dflag == 0)
5134
                tval &= 0xffff;
5135
            gen_movtl_T0_im(next_eip);
5136
            gen_push_T0(s);
5137
            gen_jmp(s, tval);
5138
        }
5139
        break;
5140
    case 0x9a: /* lcall im */
5141
        {
5142
            unsigned int selector, offset;
5143

    
5144
            if (CODE64(s))
5145
                goto illegal_op;
5146
            ot = dflag ? OT_LONG : OT_WORD;
5147
            offset = insn_get(s, ot);
5148
            selector = insn_get(s, OT_WORD);
5149

    
5150
            gen_op_movl_T0_im(selector);
5151
            gen_op_movl_T1_imu(offset);
5152
        }
5153
        goto do_lcall;
5154
    case 0xe9: /* jmp im */
5155
        if (dflag)
5156
            tval = (int32_t)insn_get(s, OT_LONG);
5157
        else
5158
            tval = (int16_t)insn_get(s, OT_WORD);
5159
        tval += s->pc - s->cs_base;
5160
        if (s->dflag == 0)
5161
            tval &= 0xffff;
5162
        gen_jmp(s, tval);
5163
        break;
5164
    case 0xea: /* ljmp im */
5165
        {
5166
            unsigned int selector, offset;
5167

    
5168
            if (CODE64(s))
5169
                goto illegal_op;
5170
            ot = dflag ? OT_LONG : OT_WORD;
5171
            offset = insn_get(s, ot);
5172
            selector = insn_get(s, OT_WORD);
5173

    
5174
            gen_op_movl_T0_im(selector);
5175
            gen_op_movl_T1_imu(offset);
5176
        }
5177
        goto do_ljmp;
5178
    case 0xeb: /* jmp Jb */
5179
        tval = (int8_t)insn_get(s, OT_BYTE);
5180
        tval += s->pc - s->cs_base;
5181
        if (s->dflag == 0)
5182
            tval &= 0xffff;
5183
        gen_jmp(s, tval);
5184
        break;
5185
    case 0x70 ... 0x7f: /* jcc Jb */
5186
        tval = (int8_t)insn_get(s, OT_BYTE);
5187
        goto do_jcc;
5188
    case 0x180 ... 0x18f: /* jcc Jv */
5189
        if (dflag) {
5190
            tval = (int32_t)insn_get(s, OT_LONG);
5191
        } else {
5192
            tval = (int16_t)insn_get(s, OT_WORD);
5193
        }
5194
    do_jcc:
5195
        next_eip = s->pc - s->cs_base;
5196
        tval += next_eip;
5197
        if (s->dflag == 0)
5198
            tval &= 0xffff;
5199
        gen_jcc(s, b, tval, next_eip);
5200
        break;
5201

    
5202
    case 0x190 ... 0x19f: /* setcc Gv */
5203
        modrm = ldub_code(s->pc++);
5204
        gen_setcc(s, b);
5205
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5206
        break;
5207
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
5208
        ot = dflag + OT_WORD;
5209
        modrm = ldub_code(s->pc++);
5210
        reg = ((modrm >> 3) & 7) | rex_r;
5211
        mod = (modrm >> 6) & 3;
5212
        gen_setcc(s, b);
5213
        if (mod != 3) {
5214
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5215
            gen_op_ld_T1_A0(ot + s->mem_index);
5216
        } else {
5217
            rm = (modrm & 7) | REX_B(s);
5218
            gen_op_mov_TN_reg(ot, 1, rm);
5219
        }
5220
        gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5221
        break;
5222

    
5223
        /************************/
5224
        /* flags */
5225
    case 0x9c: /* pushf */
5226
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5227
            break;
5228
        if (s->vm86 && s->iopl != 3) {
5229
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5230
        } else {
5231
            if (s->cc_op != CC_OP_DYNAMIC)
5232
                gen_op_set_cc_op(s->cc_op);
5233
            gen_op_movl_T0_eflags();
5234
            gen_push_T0(s);
5235
        }
5236
        break;
5237
    case 0x9d: /* popf */
5238
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5239
            break;
5240
        if (s->vm86 && s->iopl != 3) {
5241
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5242
        } else {
5243
            gen_pop_T0(s);
5244
            if (s->cpl == 0) {
5245
                if (s->dflag) {
5246
                    gen_op_movl_eflags_T0_cpl0();
5247
                } else {
5248
                    gen_op_movw_eflags_T0_cpl0();
5249
                }
5250
            } else {
5251
                if (s->cpl <= s->iopl) {
5252
                    if (s->dflag) {
5253
                        gen_op_movl_eflags_T0_io();
5254
                    } else {
5255
                        gen_op_movw_eflags_T0_io();
5256
                    }
5257
                } else {
5258
                    if (s->dflag) {
5259
                        gen_op_movl_eflags_T0();
5260
                    } else {
5261
                        gen_op_movw_eflags_T0();
5262
                    }
5263
                }
5264
            }
5265
            gen_pop_update(s);
5266
            s->cc_op = CC_OP_EFLAGS;
5267
            /* abort translation because TF flag may change */
5268
            gen_jmp_im(s->pc - s->cs_base);
5269
            gen_eob(s);
5270
        }
5271
        break;
5272
    case 0x9e: /* sahf */
5273
        if (CODE64(s))
5274
            goto illegal_op;
5275
        gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5276
        if (s->cc_op != CC_OP_DYNAMIC)
5277
            gen_op_set_cc_op(s->cc_op);
5278
        gen_op_movb_eflags_T0();
5279
        s->cc_op = CC_OP_EFLAGS;
5280
        break;
5281
    case 0x9f: /* lahf */
5282
        if (CODE64(s))
5283
            goto illegal_op;
5284
        if (s->cc_op != CC_OP_DYNAMIC)
5285
            gen_op_set_cc_op(s->cc_op);
5286
        gen_op_movl_T0_eflags();
5287
        gen_op_mov_reg_T0(OT_BYTE, R_AH);
5288
        break;
5289
    case 0xf5: /* cmc */
5290
        if (s->cc_op != CC_OP_DYNAMIC)
5291
            gen_op_set_cc_op(s->cc_op);
5292
        gen_op_cmc();
5293
        s->cc_op = CC_OP_EFLAGS;
5294
        break;
5295
    case 0xf8: /* clc */
5296
        if (s->cc_op != CC_OP_DYNAMIC)
5297
            gen_op_set_cc_op(s->cc_op);
5298
        gen_op_clc();
5299
        s->cc_op = CC_OP_EFLAGS;
5300
        break;
5301
    case 0xf9: /* stc */
5302
        if (s->cc_op != CC_OP_DYNAMIC)
5303
            gen_op_set_cc_op(s->cc_op);
5304
        gen_op_stc();
5305
        s->cc_op = CC_OP_EFLAGS;
5306
        break;
5307
    case 0xfc: /* cld */
5308
        gen_op_cld();
5309
        break;
5310
    case 0xfd: /* std */
5311
        gen_op_std();
5312
        break;
5313

    
5314
        /************************/
5315
        /* bit operations */
5316
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
5317
        ot = dflag + OT_WORD;
5318
        modrm = ldub_code(s->pc++);
5319
        op = (modrm >> 3) & 7;
5320
        mod = (modrm >> 6) & 3;
5321
        rm = (modrm & 7) | REX_B(s);
5322
        if (mod != 3) {
5323
            s->rip_offset = 1;
5324
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5325
            gen_op_ld_T0_A0(ot + s->mem_index);
5326
        } else {
5327
            gen_op_mov_TN_reg(ot, 0, rm);
5328
        }
5329
        /* load shift */
5330
        val = ldub_code(s->pc++);
5331
        gen_op_movl_T1_im(val);
5332
        if (op < 4)
5333
            goto illegal_op;
5334
        op -= 4;
5335
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5336
        s->cc_op = CC_OP_SARB + ot;
5337
        if (op != 0) {
5338
            if (mod != 3)
5339
                gen_op_st_T0_A0(ot + s->mem_index);
5340
            else
5341
                gen_op_mov_reg_T0(ot, rm);
5342
            gen_op_update_bt_cc();
5343
        }
5344
        break;
5345
    case 0x1a3: /* bt Gv, Ev */
5346
        op = 0;
5347
        goto do_btx;
5348
    case 0x1ab: /* bts */
5349
        op = 1;
5350
        goto do_btx;
5351
    case 0x1b3: /* btr */
5352
        op = 2;
5353
        goto do_btx;
5354
    case 0x1bb: /* btc */
5355
        op = 3;
5356
    do_btx:
5357
        ot = dflag + OT_WORD;
5358
        modrm = ldub_code(s->pc++);
5359
        reg = ((modrm >> 3) & 7) | rex_r;
5360
        mod = (modrm >> 6) & 3;
5361
        rm = (modrm & 7) | REX_B(s);
5362
        gen_op_mov_TN_reg(OT_LONG, 1, reg);
5363
        if (mod != 3) {
5364
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5365
            /* specific case: we need to add a displacement */
5366
            gen_op_add_bit_A0_T1[ot - OT_WORD]();
5367
            gen_op_ld_T0_A0(ot + s->mem_index);
5368
        } else {
5369
            gen_op_mov_TN_reg(ot, 0, rm);
5370
        }
5371
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5372
        s->cc_op = CC_OP_SARB + ot;
5373
        if (op != 0) {
5374
            if (mod != 3)
5375
                gen_op_st_T0_A0(ot + s->mem_index);
5376
            else
5377
                gen_op_mov_reg_T0(ot, rm);
5378
            gen_op_update_bt_cc();
5379
        }
5380
        break;
5381
    case 0x1bc: /* bsf */
5382
    case 0x1bd: /* bsr */
5383
        ot = dflag + OT_WORD;
5384
        modrm = ldub_code(s->pc++);
5385
        reg = ((modrm >> 3) & 7) | rex_r;
5386
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5387
        /* NOTE: in order to handle the 0 case, we must load the
5388
           result. It could be optimized with a generated jump */
5389
        gen_op_mov_TN_reg(ot, 1, reg);
5390
        gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5391
        gen_op_mov_reg_T1(ot, reg);
5392
        s->cc_op = CC_OP_LOGICB + ot;
5393
        break;
5394
        /************************/
5395
        /* bcd */
5396
    case 0x27: /* daa */
5397
        if (CODE64(s))
5398
            goto illegal_op;
5399
        if (s->cc_op != CC_OP_DYNAMIC)
5400
            gen_op_set_cc_op(s->cc_op);
5401
        gen_op_daa();
5402
        s->cc_op = CC_OP_EFLAGS;
5403
        break;
5404
    case 0x2f: /* das */
5405
        if (CODE64(s))
5406
            goto illegal_op;
5407
        if (s->cc_op != CC_OP_DYNAMIC)
5408
            gen_op_set_cc_op(s->cc_op);
5409
        gen_op_das();
5410
        s->cc_op = CC_OP_EFLAGS;
5411
        break;
5412
    case 0x37: /* aaa */
5413
        if (CODE64(s))
5414
            goto illegal_op;
5415
        if (s->cc_op != CC_OP_DYNAMIC)
5416
            gen_op_set_cc_op(s->cc_op);
5417
        gen_op_aaa();
5418
        s->cc_op = CC_OP_EFLAGS;
5419
        break;
5420
    case 0x3f: /* aas */
5421
        if (CODE64(s))
5422
            goto illegal_op;
5423
        if (s->cc_op != CC_OP_DYNAMIC)
5424
            gen_op_set_cc_op(s->cc_op);
5425
        gen_op_aas();
5426
        s->cc_op = CC_OP_EFLAGS;
5427
        break;
5428
    case 0xd4: /* aam */
5429
        if (CODE64(s))
5430
            goto illegal_op;
5431
        val = ldub_code(s->pc++);
5432
        if (val == 0) {
5433
            gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5434
        } else {
5435
            gen_op_aam(val);
5436
            s->cc_op = CC_OP_LOGICB;
5437
        }
5438
        break;
5439
    case 0xd5: /* aad */
5440
        if (CODE64(s))
5441
            goto illegal_op;
5442
        val = ldub_code(s->pc++);
5443
        gen_op_aad(val);
5444
        s->cc_op = CC_OP_LOGICB;
5445
        break;
5446
        /************************/
5447
        /* misc */
5448
    case 0x90: /* nop */
5449
        /* XXX: xchg + rex handling */
5450
        /* XXX: correct lock test for all insn */
5451
        if (prefixes & PREFIX_LOCK)
5452
            goto illegal_op;
5453
        if (prefixes & PREFIX_REPZ) {
5454
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
5455
        }
5456
        break;
5457
    case 0x9b: /* fwait */
5458
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5459
            (HF_MP_MASK | HF_TS_MASK)) {
5460
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5461
        } else {
5462
            if (s->cc_op != CC_OP_DYNAMIC)
5463
                gen_op_set_cc_op(s->cc_op);
5464
            gen_jmp_im(pc_start - s->cs_base);
5465
            gen_op_fwait();
5466
        }
5467
        break;
5468
    case 0xcc: /* int3 */
5469
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5470
            break;
5471
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5472
        break;
5473
    case 0xcd: /* int N */
5474
        val = ldub_code(s->pc++);
5475
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5476
            break;
5477
        if (s->vm86 && s->iopl != 3) {
5478
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5479
        } else {
5480
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5481
        }
5482
        break;
5483
    case 0xce: /* into */
5484
        if (CODE64(s))
5485
            goto illegal_op;
5486
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5487
            break;
5488
        if (s->cc_op != CC_OP_DYNAMIC)
5489
            gen_op_set_cc_op(s->cc_op);
5490
        gen_jmp_im(pc_start - s->cs_base);
5491
        gen_op_into(s->pc - pc_start);
5492
        break;
5493
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
5494
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
5495
            break;
5496
#if 1
5497
        gen_debug(s, pc_start - s->cs_base);
5498
#else
5499
        /* start debug */
5500
        tb_flush(cpu_single_env);
5501
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5502
#endif
5503
        break;
5504
    case 0xfa: /* cli */
5505
        if (!s->vm86) {
5506
            if (s->cpl <= s->iopl) {
5507
                gen_op_cli();
5508
            } else {
5509
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5510
            }
5511
        } else {
5512
            if (s->iopl == 3) {
5513
                gen_op_cli();
5514
            } else {
5515
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5516
            }
5517
        }
5518
        break;
5519
    case 0xfb: /* sti */
5520
        if (!s->vm86) {
5521
            if (s->cpl <= s->iopl) {
5522
            gen_sti:
5523
                gen_op_sti();
5524
                /* interruptions are enabled only the first insn after sti */
5525
                /* If several instructions disable interrupts, only the
5526
                   _first_ does it */
5527
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5528
                    gen_op_set_inhibit_irq();
5529
                /* give a chance to handle pending irqs */
5530
                gen_jmp_im(s->pc - s->cs_base);
5531
                gen_eob(s);
5532
            } else {
5533
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5534
            }
5535
        } else {
5536
            if (s->iopl == 3) {
5537
                goto gen_sti;
5538
            } else {
5539
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5540
            }
5541
        }
5542
        break;
5543
    case 0x62: /* bound */
5544
        if (CODE64(s))
5545
            goto illegal_op;
5546
        ot = dflag ? OT_LONG : OT_WORD;
5547
        modrm = ldub_code(s->pc++);
5548
        reg = (modrm >> 3) & 7;
5549
        mod = (modrm >> 6) & 3;
5550
        if (mod == 3)
5551
            goto illegal_op;
5552
        gen_op_mov_TN_reg(ot, 0, reg);
5553
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5554
        gen_jmp_im(pc_start - s->cs_base);
5555
        if (ot == OT_WORD)
5556
            gen_op_boundw();
5557
        else
5558
            gen_op_boundl();
5559
        break;
5560
    case 0x1c8 ... 0x1cf: /* bswap reg */
5561
        reg = (b & 7) | REX_B(s);
5562
#ifdef TARGET_X86_64
5563
        if (dflag == 2) {
5564
            gen_op_mov_TN_reg(OT_QUAD, 0, reg);
5565
            tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
5566
            gen_op_mov_reg_T0(OT_QUAD, reg);
5567
        } else
5568
        {
5569
            TCGv tmp0;
5570
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
5571
            
5572
            tmp0 = tcg_temp_new(TCG_TYPE_I32);
5573
            tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
5574
            tcg_gen_bswap_i32(tmp0, tmp0);
5575
            tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
5576
            gen_op_mov_reg_T0(OT_LONG, reg);
5577
        }
5578
#else
5579
        {
5580
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
5581
            tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
5582
            gen_op_mov_reg_T0(OT_LONG, reg);
5583
        }
5584
#endif
5585
        break;
5586
    case 0xd6: /* salc */
5587
        if (CODE64(s))
5588
            goto illegal_op;
5589
        if (s->cc_op != CC_OP_DYNAMIC)
5590
            gen_op_set_cc_op(s->cc_op);
5591
        gen_op_salc();
5592
        break;
5593
    case 0xe0: /* loopnz */
5594
    case 0xe1: /* loopz */
5595
        if (s->cc_op != CC_OP_DYNAMIC)
5596
            gen_op_set_cc_op(s->cc_op);
5597
        /* FALL THRU */
5598
    case 0xe2: /* loop */
5599
    case 0xe3: /* jecxz */
5600
        {
5601
            int l1, l2;
5602

    
5603
            tval = (int8_t)insn_get(s, OT_BYTE);
5604
            next_eip = s->pc - s->cs_base;
5605
            tval += next_eip;
5606
            if (s->dflag == 0)
5607
                tval &= 0xffff;
5608

    
5609
            l1 = gen_new_label();
5610
            l2 = gen_new_label();
5611
            b &= 3;
5612
            if (b == 3) {
5613
                gen_op_jz_ecx[s->aflag](l1);
5614
            } else {
5615
                gen_op_dec_ECX[s->aflag]();
5616
                if (b <= 1)
5617
                    gen_op_mov_T0_cc();
5618
                gen_op_loop[s->aflag][b](l1);
5619
            }
5620

    
5621
            gen_jmp_im(next_eip);
5622
            gen_op_jmp_label(l2);
5623
            gen_set_label(l1);
5624
            gen_jmp_im(tval);
5625
            gen_set_label(l2);
5626
            gen_eob(s);
5627
        }
5628
        break;
5629
    case 0x130: /* wrmsr */
5630
    case 0x132: /* rdmsr */
5631
        if (s->cpl != 0) {
5632
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5633
        } else {
5634
            int retval = 0;
5635
            if (b & 2) {
5636
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
5637
                gen_op_rdmsr();
5638
            } else {
5639
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
5640
                gen_op_wrmsr();
5641
            }
5642
            if(retval)
5643
                gen_eob(s);
5644
        }
5645
        break;
5646
    case 0x131: /* rdtsc */
5647
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
5648
            break;
5649
        gen_jmp_im(pc_start - s->cs_base);
5650
        gen_op_rdtsc();
5651
        break;
5652
    case 0x133: /* rdpmc */
5653
        gen_jmp_im(pc_start - s->cs_base);
5654
        gen_op_rdpmc();
5655
        break;
5656
    case 0x134: /* sysenter */
5657
        if (CODE64(s))
5658
            goto illegal_op;
5659
        if (!s->pe) {
5660
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5661
        } else {
5662
            if (s->cc_op != CC_OP_DYNAMIC) {
5663
                gen_op_set_cc_op(s->cc_op);
5664
                s->cc_op = CC_OP_DYNAMIC;
5665
            }
5666
            gen_jmp_im(pc_start - s->cs_base);
5667
            gen_op_sysenter();
5668
            gen_eob(s);
5669
        }
5670
        break;
5671
    case 0x135: /* sysexit */
5672
        if (CODE64(s))
5673
            goto illegal_op;
5674
        if (!s->pe) {
5675
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5676
        } else {
5677
            if (s->cc_op != CC_OP_DYNAMIC) {
5678
                gen_op_set_cc_op(s->cc_op);
5679
                s->cc_op = CC_OP_DYNAMIC;
5680
            }
5681
            gen_jmp_im(pc_start - s->cs_base);
5682
            gen_op_sysexit();
5683
            gen_eob(s);
5684
        }
5685
        break;
5686
#ifdef TARGET_X86_64
5687
    case 0x105: /* syscall */
5688
        /* XXX: is it usable in real mode ? */
5689
        if (s->cc_op != CC_OP_DYNAMIC) {
5690
            gen_op_set_cc_op(s->cc_op);
5691
            s->cc_op = CC_OP_DYNAMIC;
5692
        }
5693
        gen_jmp_im(pc_start - s->cs_base);
5694
        gen_op_syscall(s->pc - pc_start);
5695
        gen_eob(s);
5696
        break;
5697
    case 0x107: /* sysret */
5698
        if (!s->pe) {
5699
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5700
        } else {
5701
            if (s->cc_op != CC_OP_DYNAMIC) {
5702
                gen_op_set_cc_op(s->cc_op);
5703
                s->cc_op = CC_OP_DYNAMIC;
5704
            }
5705
            gen_jmp_im(pc_start - s->cs_base);
5706
            gen_op_sysret(s->dflag);
5707
            /* condition codes are modified only in long mode */
5708
            if (s->lma)
5709
                s->cc_op = CC_OP_EFLAGS;
5710
            gen_eob(s);
5711
        }
5712
        break;
5713
#endif
5714
    case 0x1a2: /* cpuid */
5715
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
5716
            break;
5717
        gen_op_cpuid();
5718
        break;
5719
    case 0xf4: /* hlt */
5720
        if (s->cpl != 0) {
5721
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5722
        } else {
5723
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
5724
                break;
5725
            if (s->cc_op != CC_OP_DYNAMIC)
5726
                gen_op_set_cc_op(s->cc_op);
5727
            gen_jmp_im(s->pc - s->cs_base);
5728
            gen_op_hlt();
5729
            s->is_jmp = 3;
5730
        }
5731
        break;
5732
    case 0x100:
5733
        modrm = ldub_code(s->pc++);
5734
        mod = (modrm >> 6) & 3;
5735
        op = (modrm >> 3) & 7;
5736
        switch(op) {
5737
        case 0: /* sldt */
5738
            if (!s->pe || s->vm86)
5739
                goto illegal_op;
5740
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
5741
                break;
5742
            gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5743
            ot = OT_WORD;
5744
            if (mod == 3)
5745
                ot += s->dflag;
5746
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5747
            break;
5748
        case 2: /* lldt */
5749
            if (!s->pe || s->vm86)
5750
                goto illegal_op;
5751
            if (s->cpl != 0) {
5752
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5753
            } else {
5754
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
5755
                    break;
5756
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5757
                gen_jmp_im(pc_start - s->cs_base);
5758
                gen_op_lldt_T0();
5759
            }
5760
            break;
5761
        case 1: /* str */
5762
            if (!s->pe || s->vm86)
5763
                goto illegal_op;
5764
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
5765
                break;
5766
            gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5767
            ot = OT_WORD;
5768
            if (mod == 3)
5769
                ot += s->dflag;
5770
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5771
            break;
5772
        case 3: /* ltr */
5773
            if (!s->pe || s->vm86)
5774
                goto illegal_op;
5775
            if (s->cpl != 0) {
5776
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5777
            } else {
5778
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
5779
                    break;
5780
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5781
                gen_jmp_im(pc_start - s->cs_base);
5782
                gen_op_ltr_T0();
5783
            }
5784
            break;
5785
        case 4: /* verr */
5786
        case 5: /* verw */
5787
            if (!s->pe || s->vm86)
5788
                goto illegal_op;
5789
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5790
            if (s->cc_op != CC_OP_DYNAMIC)
5791
                gen_op_set_cc_op(s->cc_op);
5792
            if (op == 4)
5793
                gen_op_verr();
5794
            else
5795
                gen_op_verw();
5796
            s->cc_op = CC_OP_EFLAGS;
5797
            break;
5798
        default:
5799
            goto illegal_op;
5800
        }
5801
        break;
5802
    case 0x101:
5803
        modrm = ldub_code(s->pc++);
5804
        mod = (modrm >> 6) & 3;
5805
        op = (modrm >> 3) & 7;
5806
        rm = modrm & 7;
5807
        switch(op) {
5808
        case 0: /* sgdt */
5809
            if (mod == 3)
5810
                goto illegal_op;
5811
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
5812
                break;
5813
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5814
            gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5815
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
5816
            gen_add_A0_im(s, 2);
5817
            gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5818
            if (!s->dflag)
5819
                gen_op_andl_T0_im(0xffffff);
5820
            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5821
            break;
5822
        case 1:
5823
            if (mod == 3) {
5824
                switch (rm) {
5825
                case 0: /* monitor */
5826
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5827
                        s->cpl != 0)
5828
                        goto illegal_op;
5829
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
5830
                        break;
5831
                    gen_jmp_im(pc_start - s->cs_base);
5832
#ifdef TARGET_X86_64
5833
                    if (s->aflag == 2) {
5834
                        gen_op_movq_A0_reg(R_EBX);
5835
                        gen_op_addq_A0_AL();
5836
                    } else
5837
#endif
5838
                    {
5839
                        gen_op_movl_A0_reg(R_EBX);
5840
                        gen_op_addl_A0_AL();
5841
                        if (s->aflag == 0)
5842
                            gen_op_andl_A0_ffff();
5843
                    }
5844
                    gen_add_A0_ds_seg(s);
5845
                    gen_op_monitor();
5846
                    break;
5847
                case 1: /* mwait */
5848
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5849
                        s->cpl != 0)
5850
                        goto illegal_op;
5851
                    if (s->cc_op != CC_OP_DYNAMIC) {
5852
                        gen_op_set_cc_op(s->cc_op);
5853
                        s->cc_op = CC_OP_DYNAMIC;
5854
                    }
5855
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
5856
                        break;
5857
                    gen_jmp_im(s->pc - s->cs_base);
5858
                    gen_op_mwait();
5859
                    gen_eob(s);
5860
                    break;
5861
                default:
5862
                    goto illegal_op;
5863
                }
5864
            } else { /* sidt */
5865
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
5866
                    break;
5867
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5868
                gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
5869
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
5870
                gen_add_A0_im(s, 2);
5871
                gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
5872
                if (!s->dflag)
5873
                    gen_op_andl_T0_im(0xffffff);
5874
                gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5875
            }
5876
            break;
5877
        case 2: /* lgdt */
5878
        case 3: /* lidt */
5879
            if (mod == 3) {
5880
                switch(rm) {
5881
                case 0: /* VMRUN */
5882
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
5883
                        break;
5884
                    if (s->cc_op != CC_OP_DYNAMIC)
5885
                        gen_op_set_cc_op(s->cc_op);
5886
                    gen_jmp_im(s->pc - s->cs_base);
5887
                    gen_op_vmrun();
5888
                    s->cc_op = CC_OP_EFLAGS;
5889
                    gen_eob(s);
5890
                    break;
5891
                case 1: /* VMMCALL */
5892
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
5893
                         break;
5894
                    /* FIXME: cause #UD if hflags & SVM */
5895
                    gen_op_vmmcall();
5896
                    break;
5897
                case 2: /* VMLOAD */
5898
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
5899
                         break;
5900
                    gen_op_vmload();
5901
                    break;
5902
                case 3: /* VMSAVE */
5903
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
5904
                         break;
5905
                    gen_op_vmsave();
5906
                    break;
5907
                case 4: /* STGI */
5908
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
5909
                         break;
5910
                    gen_op_stgi();
5911
                    break;
5912
                case 5: /* CLGI */
5913
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
5914
                         break;
5915
                    gen_op_clgi();
5916
                    break;
5917
                case 6: /* SKINIT */
5918
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
5919
                         break;
5920
                    gen_op_skinit();
5921
                    break;
5922
                case 7: /* INVLPGA */
5923
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
5924
                         break;
5925
                    gen_op_invlpga();
5926
                    break;
5927
                default:
5928
                    goto illegal_op;
5929
                }
5930
            } else if (s->cpl != 0) {
5931
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5932
            } else {
5933
                if (gen_svm_check_intercept(s, pc_start,
5934
                                            op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
5935
                    break;
5936
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5937
                gen_op_ld_T1_A0(OT_WORD + s->mem_index);
5938
                gen_add_A0_im(s, 2);
5939
                gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
5940
                if (!s->dflag)
5941
                    gen_op_andl_T0_im(0xffffff);
5942
                if (op == 2) {
5943
                    gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
5944
                    gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
5945
                } else {
5946
                    gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
5947
                    gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
5948
                }
5949
            }
5950
            break;
5951
        case 4: /* smsw */
5952
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
5953
                break;
5954
            gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
5955
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
5956
            break;
5957
        case 6: /* lmsw */
5958
            if (s->cpl != 0) {
5959
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5960
            } else {
5961
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
5962
                    break;
5963
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5964
                gen_op_lmsw_T0();
5965
                gen_jmp_im(s->pc - s->cs_base);
5966
                gen_eob(s);
5967
            }
5968
            break;
5969
        case 7: /* invlpg */
5970
            if (s->cpl != 0) {
5971
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5972
            } else {
5973
                if (mod == 3) {
5974
#ifdef TARGET_X86_64
5975
                    if (CODE64(s) && rm == 0) {
5976
                        /* swapgs */
5977
                        gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
5978
                        gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
5979
                        gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
5980
                        gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
5981
                    } else
5982
#endif
5983
                    {
5984
                        goto illegal_op;
5985
                    }
5986
                } else {
5987
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
5988
                        break;
5989
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5990
                    gen_op_invlpg_A0();
5991
                    gen_jmp_im(s->pc - s->cs_base);
5992
                    gen_eob(s);
5993
                }
5994
            }
5995
            break;
5996
        default:
5997
            goto illegal_op;
5998
        }
5999
        break;
6000
    case 0x108: /* invd */
6001
    case 0x109: /* wbinvd */
6002
        if (s->cpl != 0) {
6003
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6004
        } else {
6005
            if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6006
                break;
6007
            /* nothing to do */
6008
        }
6009
        break;
6010
    case 0x63: /* arpl or movslS (x86_64) */
6011
#ifdef TARGET_X86_64
6012
        if (CODE64(s)) {
6013
            int d_ot;
6014
            /* d_ot is the size of destination */
6015
            d_ot = dflag + OT_WORD;
6016

    
6017
            modrm = ldub_code(s->pc++);
6018
            reg = ((modrm >> 3) & 7) | rex_r;
6019
            mod = (modrm >> 6) & 3;
6020
            rm = (modrm & 7) | REX_B(s);
6021

    
6022
            if (mod == 3) {
6023
                gen_op_mov_TN_reg(OT_LONG, 0, rm);
6024
                /* sign extend */
6025
                if (d_ot == OT_QUAD)
6026
                    gen_op_movslq_T0_T0();
6027
                gen_op_mov_reg_T0(d_ot, reg);
6028
            } else {
6029
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6030
                if (d_ot == OT_QUAD) {
6031
                    gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6032
                } else {
6033
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6034
                }
6035
                gen_op_mov_reg_T0(d_ot, reg);
6036
            }
6037
        } else
6038
#endif
6039
        {
6040
            if (!s->pe || s->vm86)
6041
                goto illegal_op;
6042
            ot = dflag ? OT_LONG : OT_WORD;
6043
            modrm = ldub_code(s->pc++);
6044
            reg = (modrm >> 3) & 7;
6045
            mod = (modrm >> 6) & 3;
6046
            rm = modrm & 7;
6047
            if (mod != 3) {
6048
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6049
                gen_op_ld_T0_A0(ot + s->mem_index);
6050
            } else {
6051
                gen_op_mov_TN_reg(ot, 0, rm);
6052
            }
6053
            if (s->cc_op != CC_OP_DYNAMIC)
6054
                gen_op_set_cc_op(s->cc_op);
6055
            gen_op_arpl();
6056
            s->cc_op = CC_OP_EFLAGS;
6057
            if (mod != 3) {
6058
                gen_op_st_T0_A0(ot + s->mem_index);
6059
            } else {
6060
                gen_op_mov_reg_T0(ot, rm);
6061
            }
6062
            gen_op_arpl_update();
6063
        }
6064
        break;
6065
    case 0x102: /* lar */
6066
    case 0x103: /* lsl */
6067
        if (!s->pe || s->vm86)
6068
            goto illegal_op;
6069
        ot = dflag ? OT_LONG : OT_WORD;
6070
        modrm = ldub_code(s->pc++);
6071
        reg = ((modrm >> 3) & 7) | rex_r;
6072
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6073
        gen_op_mov_TN_reg(ot, 1, reg);
6074
        if (s->cc_op != CC_OP_DYNAMIC)
6075
            gen_op_set_cc_op(s->cc_op);
6076
        if (b == 0x102)
6077
            gen_op_lar();
6078
        else
6079
            gen_op_lsl();
6080
        s->cc_op = CC_OP_EFLAGS;
6081
        gen_op_mov_reg_T1(ot, reg);
6082
        break;
6083
    case 0x118:
6084
        modrm = ldub_code(s->pc++);
6085
        mod = (modrm >> 6) & 3;
6086
        op = (modrm >> 3) & 7;
6087
        switch(op) {
6088
        case 0: /* prefetchnta */
6089
        case 1: /* prefetchnt0 */
6090
        case 2: /* prefetchnt0 */
6091
        case 3: /* prefetchnt0 */
6092
            if (mod == 3)
6093
                goto illegal_op;
6094
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6095
            /* nothing more to do */
6096
            break;
6097
        default: /* nop (multi byte) */
6098
            gen_nop_modrm(s, modrm);
6099
            break;
6100
        }
6101
        break;
6102
    case 0x119 ... 0x11f: /* nop (multi byte) */
6103
        modrm = ldub_code(s->pc++);
6104
        gen_nop_modrm(s, modrm);
6105
        break;
6106
    case 0x120: /* mov reg, crN */
6107
    case 0x122: /* mov crN, reg */
6108
        if (s->cpl != 0) {
6109
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6110
        } else {
6111
            modrm = ldub_code(s->pc++);
6112
            if ((modrm & 0xc0) != 0xc0)
6113
                goto illegal_op;
6114
            rm = (modrm & 7) | REX_B(s);
6115
            reg = ((modrm >> 3) & 7) | rex_r;
6116
            if (CODE64(s))
6117
                ot = OT_QUAD;
6118
            else
6119
                ot = OT_LONG;
6120
            switch(reg) {
6121
            case 0:
6122
            case 2:
6123
            case 3:
6124
            case 4:
6125
            case 8:
6126
                if (b & 2) {
6127
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6128
                    gen_op_mov_TN_reg(ot, 0, rm);
6129
                    gen_op_movl_crN_T0(reg);
6130
                    gen_jmp_im(s->pc - s->cs_base);
6131
                    gen_eob(s);
6132
                } else {
6133
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6134
#if !defined(CONFIG_USER_ONLY)
6135
                    if (reg == 8)
6136
                        gen_op_movtl_T0_cr8();
6137
                    else
6138
#endif
6139
                        gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6140
                    gen_op_mov_reg_T0(ot, rm);
6141
                }
6142
                break;
6143
            default:
6144
                goto illegal_op;
6145
            }
6146
        }
6147
        break;
6148
    case 0x121: /* mov reg, drN */
6149
    case 0x123: /* mov drN, reg */
6150
        if (s->cpl != 0) {
6151
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6152
        } else {
6153
            modrm = ldub_code(s->pc++);
6154
            if ((modrm & 0xc0) != 0xc0)
6155
                goto illegal_op;
6156
            rm = (modrm & 7) | REX_B(s);
6157
            reg = ((modrm >> 3) & 7) | rex_r;
6158
            if (CODE64(s))
6159
                ot = OT_QUAD;
6160
            else
6161
                ot = OT_LONG;
6162
            /* XXX: do it dynamically with CR4.DE bit */
6163
            if (reg == 4 || reg == 5 || reg >= 8)
6164
                goto illegal_op;
6165
            if (b & 2) {
6166
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6167
                gen_op_mov_TN_reg(ot, 0, rm);
6168
                gen_op_movl_drN_T0(reg);
6169
                gen_jmp_im(s->pc - s->cs_base);
6170
                gen_eob(s);
6171
            } else {
6172
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6173
                gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6174
                gen_op_mov_reg_T0(ot, rm);
6175
            }
6176
        }
6177
        break;
6178
    case 0x106: /* clts */
6179
        if (s->cpl != 0) {
6180
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6181
        } else {
6182
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6183
            gen_op_clts();
6184
            /* abort block because static cpu state changed */
6185
            gen_jmp_im(s->pc - s->cs_base);
6186
            gen_eob(s);
6187
        }
6188
        break;
6189
    /* MMX/3DNow!/SSE/SSE2/SSE3 support */
6190
    case 0x1c3: /* MOVNTI reg, mem */
6191
        if (!(s->cpuid_features & CPUID_SSE2))
6192
            goto illegal_op;
6193
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6194
        modrm = ldub_code(s->pc++);
6195
        mod = (modrm >> 6) & 3;
6196
        if (mod == 3)
6197
            goto illegal_op;
6198
        reg = ((modrm >> 3) & 7) | rex_r;
6199
        /* generate a generic store */
6200
        gen_ldst_modrm(s, modrm, ot, reg, 1);
6201
        break;
6202
    case 0x1ae:
6203
        modrm = ldub_code(s->pc++);
6204
        mod = (modrm >> 6) & 3;
6205
        op = (modrm >> 3) & 7;
6206
        switch(op) {
6207
        case 0: /* fxsave */
6208
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6209
                (s->flags & HF_EM_MASK))
6210
                goto illegal_op;
6211
            if (s->flags & HF_TS_MASK) {
6212
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6213
                break;
6214
            }
6215
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6216
            gen_op_fxsave_A0((s->dflag == 2));
6217
            break;
6218
        case 1: /* fxrstor */
6219
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6220
                (s->flags & HF_EM_MASK))
6221
                goto illegal_op;
6222
            if (s->flags & HF_TS_MASK) {
6223
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6224
                break;
6225
            }
6226
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6227
            gen_op_fxrstor_A0((s->dflag == 2));
6228
            break;
6229
        case 2: /* ldmxcsr */
6230
        case 3: /* stmxcsr */
6231
            if (s->flags & HF_TS_MASK) {
6232
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6233
                break;
6234
            }
6235
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6236
                mod == 3)
6237
                goto illegal_op;
6238
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6239
            if (op == 2) {
6240
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6241
                gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6242
            } else {
6243
                gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6244
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
6245
            }
6246
            break;
6247
        case 5: /* lfence */
6248
        case 6: /* mfence */
6249
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6250
                goto illegal_op;
6251
            break;
6252
        case 7: /* sfence / clflush */
6253
            if ((modrm & 0xc7) == 0xc0) {
6254
                /* sfence */
6255
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
6256
                if (!(s->cpuid_features & CPUID_SSE))
6257
                    goto illegal_op;
6258
            } else {
6259
                /* clflush */
6260
                if (!(s->cpuid_features & CPUID_CLFLUSH))
6261
                    goto illegal_op;
6262
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6263
            }
6264
            break;
6265
        default:
6266
            goto illegal_op;
6267
        }
6268
        break;
6269
    case 0x10d: /* 3DNow! prefetch(w) */
6270
        modrm = ldub_code(s->pc++);
6271
        mod = (modrm >> 6) & 3;
6272
        if (mod == 3)
6273
            goto illegal_op;
6274
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6275
        /* ignore for now */
6276
        break;
6277
    case 0x1aa: /* rsm */
6278
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
6279
            break;
6280
        if (!(s->flags & HF_SMM_MASK))
6281
            goto illegal_op;
6282
        if (s->cc_op != CC_OP_DYNAMIC) {
6283
            gen_op_set_cc_op(s->cc_op);
6284
            s->cc_op = CC_OP_DYNAMIC;
6285
        }
6286
        gen_jmp_im(s->pc - s->cs_base);
6287
        gen_op_rsm();
6288
        gen_eob(s);
6289
        break;
6290
    case 0x10e ... 0x10f:
6291
        /* 3DNow! instructions, ignore prefixes */
6292
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
6293
    case 0x110 ... 0x117:
6294
    case 0x128 ... 0x12f:
6295
    case 0x150 ... 0x177:
6296
    case 0x17c ... 0x17f:
6297
    case 0x1c2:
6298
    case 0x1c4 ... 0x1c6:
6299
    case 0x1d0 ... 0x1fe:
6300
        gen_sse(s, b, pc_start, rex_r);
6301
        break;
6302
    default:
6303
        goto illegal_op;
6304
    }
6305
    /* lock generation */
6306
    if (s->prefix & PREFIX_LOCK)
6307
        gen_op_unlock();
6308
    return s->pc;
6309
 illegal_op:
6310
    if (s->prefix & PREFIX_LOCK)
6311
        gen_op_unlock();
6312
    /* XXX: ensure that no lock was generated */
6313
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6314
    return s->pc;
6315
}
6316

    
6317
#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
6318
#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
6319

    
6320
/* flags read by an operation */
6321
static uint16_t opc_read_flags[NB_OPS] = {
6322
    [INDEX_op_aas] = CC_A,
6323
    [INDEX_op_aaa] = CC_A,
6324
    [INDEX_op_das] = CC_A | CC_C,
6325
    [INDEX_op_daa] = CC_A | CC_C,
6326

    
6327
    /* subtle: due to the incl/decl implementation, C is used */
6328
    [INDEX_op_update_inc_cc] = CC_C,
6329

    
6330
    [INDEX_op_into] = CC_O,
6331

    
6332
    [INDEX_op_jb_subb] = CC_C,
6333
    [INDEX_op_jb_subw] = CC_C,
6334
    [INDEX_op_jb_subl] = CC_C,
6335

    
6336
    [INDEX_op_jz_subb] = CC_Z,
6337
    [INDEX_op_jz_subw] = CC_Z,
6338
    [INDEX_op_jz_subl] = CC_Z,
6339

    
6340
    [INDEX_op_jbe_subb] = CC_Z | CC_C,
6341
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
6342
    [INDEX_op_jbe_subl] = CC_Z | CC_C,
6343

    
6344
    [INDEX_op_js_subb] = CC_S,
6345
    [INDEX_op_js_subw] = CC_S,
6346
    [INDEX_op_js_subl] = CC_S,
6347

    
6348
    [INDEX_op_jl_subb] = CC_O | CC_S,
6349
    [INDEX_op_jl_subw] = CC_O | CC_S,
6350
    [INDEX_op_jl_subl] = CC_O | CC_S,
6351

    
6352
    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
6353
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
6354
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,
6355

    
6356
    [INDEX_op_loopnzw] = CC_Z,
6357
    [INDEX_op_loopnzl] = CC_Z,
6358
    [INDEX_op_loopzw] = CC_Z,
6359
    [INDEX_op_loopzl] = CC_Z,
6360

    
6361
    [INDEX_op_seto_T0_cc] = CC_O,
6362
    [INDEX_op_setb_T0_cc] = CC_C,
6363
    [INDEX_op_setz_T0_cc] = CC_Z,
6364
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
6365
    [INDEX_op_sets_T0_cc] = CC_S,
6366
    [INDEX_op_setp_T0_cc] = CC_P,
6367
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
6368
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,
6369

    
6370
    [INDEX_op_setb_T0_subb] = CC_C,
6371
    [INDEX_op_setb_T0_subw] = CC_C,
6372
    [INDEX_op_setb_T0_subl] = CC_C,
6373

    
6374
    [INDEX_op_setz_T0_subb] = CC_Z,
6375
    [INDEX_op_setz_T0_subw] = CC_Z,
6376
    [INDEX_op_setz_T0_subl] = CC_Z,
6377

    
6378
    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
6379
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
6380
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,
6381

    
6382
    [INDEX_op_sets_T0_subb] = CC_S,
6383
    [INDEX_op_sets_T0_subw] = CC_S,
6384
    [INDEX_op_sets_T0_subl] = CC_S,
6385

    
6386
    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
6387
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
6388
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,
6389

    
6390
    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
6391
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
6392
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,
6393

    
6394
    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
6395
    [INDEX_op_cmc] = CC_C,
6396
    [INDEX_op_salc] = CC_C,
6397

    
6398
    /* needed for correct flag optimisation before string ops */
6399
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
6400
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
6401
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
6402
    [INDEX_op_jz_ecxl] = CC_OSZAPC,
6403

    
6404
#ifdef TARGET_X86_64
6405
    [INDEX_op_jb_subq] = CC_C,
6406
    [INDEX_op_jz_subq] = CC_Z,
6407
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
6408
    [INDEX_op_js_subq] = CC_S,
6409
    [INDEX_op_jl_subq] = CC_O | CC_S,
6410
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,
6411

    
6412
    [INDEX_op_loopnzq] = CC_Z,
6413
    [INDEX_op_loopzq] = CC_Z,
6414

    
6415
    [INDEX_op_setb_T0_subq] = CC_C,
6416
    [INDEX_op_setz_T0_subq] = CC_Z,
6417
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
6418
    [INDEX_op_sets_T0_subq] = CC_S,
6419
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
6420
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,
6421

    
6422
    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
6423
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
6424
#endif
6425

    
6426
#define DEF_READF(SUFFIX)\
6427
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6428
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6429
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6430
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6431
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6432
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6433
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6434
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6435
\
6436
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6437
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6438
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
6439
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6440
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6441
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6442
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6443
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)
6444

    
6445
    DEF_READF( )
6446
    DEF_READF(_raw)
6447
#ifndef CONFIG_USER_ONLY
6448
    DEF_READF(_kernel)
6449
    DEF_READF(_user)
6450
#endif
6451
};
6452

    
6453
/* flags written by an operation */
6454
static uint16_t opc_write_flags[NB_OPS] = {
6455
    [INDEX_op_update2_cc] = CC_OSZAPC,
6456
    [INDEX_op_update1_cc] = CC_OSZAPC,
6457
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
6458
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
6459
    /* subtle: due to the incl/decl implementation, C is used */
6460
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
6461
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,
6462

    
6463
    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
6464
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
6465
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
6466
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
6467
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
6468
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
6469
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
6470
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
6471
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
6472
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
6473
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)
6474

    
6475
    /* sse */
6476
    [INDEX_op_ucomiss] = CC_OSZAPC,
6477
    [INDEX_op_ucomisd] = CC_OSZAPC,
6478
    [INDEX_op_comiss] = CC_OSZAPC,
6479
    [INDEX_op_comisd] = CC_OSZAPC,
6480

    
6481
    /* bcd */
6482
    [INDEX_op_aam] = CC_OSZAPC,
6483
    [INDEX_op_aad] = CC_OSZAPC,
6484
    [INDEX_op_aas] = CC_OSZAPC,
6485
    [INDEX_op_aaa] = CC_OSZAPC,
6486
    [INDEX_op_das] = CC_OSZAPC,
6487
    [INDEX_op_daa] = CC_OSZAPC,
6488

    
6489
    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
6490
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
6491
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
6492
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
6493
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
6494
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
6495
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
6496
    [INDEX_op_clc] = CC_C,
6497
    [INDEX_op_stc] = CC_C,
6498
    [INDEX_op_cmc] = CC_C,
6499

    
6500
    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
6501
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
6502
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
6503
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
6504
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
6505
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
6506
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
6507
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
6508
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
6509
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
6510
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
6511
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)
6512

    
6513
    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
6514
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
6515
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
6516
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
6517
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
6518
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)
6519

    
6520
    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
6521
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
6522
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
6523
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)
6524

    
6525
    [INDEX_op_cmpxchg8b] = CC_Z,
6526
    [INDEX_op_lar] = CC_Z,
6527
    [INDEX_op_lsl] = CC_Z,
6528
    [INDEX_op_verr] = CC_Z,
6529
    [INDEX_op_verw] = CC_Z,
6530
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
6531
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,
6532

    
6533
#define DEF_WRITEF(SUFFIX)\
6534
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6535
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6536
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6537
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6538
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6539
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6540
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6541
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6542
\
6543
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6544
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6545
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6546
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6547
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6548
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6549
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6550
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6551
\
6552
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6553
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6554
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6555
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6556
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6557
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6558
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
6559
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
6560
\
6561
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6562
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6563
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6564
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6565
\
6566
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6567
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6568
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6569
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6570
\
6571
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6572
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6573
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
6574
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
6575
\
6576
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6577
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6578
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6579
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6580
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6581
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6582
\
6583
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6584
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
6585
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
6586
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6587
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
6588
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
6589
\
6590
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6591
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6592
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
6593
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)
6594

    
6595

    
6596
    DEF_WRITEF( )
6597
    DEF_WRITEF(_raw)
6598
#ifndef CONFIG_USER_ONLY
6599
    DEF_WRITEF(_kernel)
6600
    DEF_WRITEF(_user)
6601
#endif
6602
};
6603

    
6604
/* simpler form of an operation if no flags need to be generated */
6605
static uint16_t opc_simpler[NB_OPS] = {
6606
    [INDEX_op_update2_cc] = INDEX_op_nop,
6607
    [INDEX_op_update1_cc] = INDEX_op_nop,
6608
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
6609
#if 0
6610
    /* broken: CC_OP logic must be rewritten */
6611
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
6612
#endif
6613

    
6614
    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
6615
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
6616
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
6617
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)
6618

    
6619
    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
6620
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
6621
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
6622
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)
6623

    
6624
    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
6625
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
6626
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
6627
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)
6628

    
6629
#define DEF_SIMPLER(SUFFIX)\
6630
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
6631
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
6632
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
6633
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
6634
\
6635
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
6636
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
6637
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
6638
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)
6639

    
6640
    DEF_SIMPLER( )
6641
    DEF_SIMPLER(_raw)
6642
#ifndef CONFIG_USER_ONLY
6643
    DEF_SIMPLER(_kernel)
6644
    DEF_SIMPLER(_user)
6645
#endif
6646
};
6647

    
6648
/* TCG macro expansion callback: expands a macro op identified by
   macro_id into real TCG ops.  Only the MACRO_TEST case exists, and it
   is normally compiled out (MACRO_TEST is undefined by default). */
static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
{
    switch(macro_id) {
#ifdef MACRO_TEST
    case MACRO_TEST:
        tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
        break;
#endif
    default:
        /* unknown macro ids are silently ignored */
        break;
    }
}
6658

    
6659
void optimize_flags_init(void)
6660
{
6661
    int i;
6662
    /* put default values in arrays */
6663
    for(i = 0; i < NB_OPS; i++) {
6664
        if (opc_simpler[i] == 0)
6665
            opc_simpler[i] = i;
6666
    }
6667

    
6668
    tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
6669

    
6670
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
6671
#if TARGET_LONG_BITS > HOST_LONG_BITS
6672
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL, 
6673
                                  TCG_AREG0, offsetof(CPUState, t0), "T0");
6674
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
6675
                                  TCG_AREG0, offsetof(CPUState, t1), "T1");
6676
    cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
6677
                                TCG_AREG0, offsetof(CPUState, t2), "A0");
6678
#else
6679
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
6680
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
6681
    cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
6682
#endif
6683
    /* the helpers are only registered to print debug info */
6684
    TCG_HELPER(helper_divl_EAX_T0);
6685
    TCG_HELPER(helper_idivl_EAX_T0);
6686
}
6687

    
6688
/* CPU flags computation optimization: we move backward thru the
6689
   generated code to see which flags are needed. The operation is
6690
   modified if suitable */
6691
static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6692
{
6693
    uint16_t *opc_ptr;
6694
    int live_flags, write_flags, op;
6695

    
6696
    opc_ptr = opc_buf + opc_buf_len;
6697
    /* live_flags contains the flags needed by the next instructions
6698
       in the code. At the end of the block, we consider that all the
6699
       flags are live. */
6700
    live_flags = CC_OSZAPC;
6701
    while (opc_ptr > opc_buf) {
6702
        op = *--opc_ptr;
6703
        /* if none of the flags written by the instruction is used,
6704
           then we can try to find a simpler instruction */
6705
        write_flags = opc_write_flags[op];
6706
        if ((live_flags & write_flags) == 0) {
6707
            *opc_ptr = opc_simpler[op];
6708
        }
6709
        /* compute the live flags before the instruction */
6710
        live_flags &= ~write_flags;
6711
        live_flags |= opc_read_flags[op];
6712
    }
6713
}
6714

    
6715
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
6716
   basic block 'tb'. If search_pc is TRUE, also generate PC
6717
   information for each intermediate instruction. */
6718
static inline int gen_intermediate_code_internal(CPUState *env,
6719
                                                 TranslationBlock *tb,
6720
                                                 int search_pc)
6721
{
6722
    DisasContext dc1, *dc = &dc1;
6723
    target_ulong pc_ptr;
6724
    uint16_t *gen_opc_end;
6725
    int j, lj, cflags;
6726
    uint64_t flags;
6727
    target_ulong pc_start;
6728
    target_ulong cs_base;
6729

    
6730
    /* generate intermediate code */
6731
    pc_start = tb->pc;
6732
    cs_base = tb->cs_base;
6733
    flags = tb->flags;
6734
    cflags = tb->cflags;
6735

    
6736
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
6737
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
6738
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
6739
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
6740
    dc->f_st = 0;
6741
    dc->vm86 = (flags >> VM_SHIFT) & 1;
6742
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
6743
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
6744
    dc->tf = (flags >> TF_SHIFT) & 1;
6745
    dc->singlestep_enabled = env->singlestep_enabled;
6746
    dc->cc_op = CC_OP_DYNAMIC;
6747
    dc->cs_base = cs_base;
6748
    dc->tb = tb;
6749
    dc->popl_esp_hack = 0;
6750
    /* select memory access functions */
6751
    dc->mem_index = 0;
6752
    if (flags & HF_SOFTMMU_MASK) {
6753
        if (dc->cpl == 3)
6754
            dc->mem_index = 2 * 4;
6755
        else
6756
            dc->mem_index = 1 * 4;
6757
    }
6758
    dc->cpuid_features = env->cpuid_features;
6759
    dc->cpuid_ext_features = env->cpuid_ext_features;
6760
#ifdef TARGET_X86_64
6761
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
6762
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
6763
#endif
6764
    dc->flags = flags;
6765
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
6766
                    (flags & HF_INHIBIT_IRQ_MASK)
6767
#ifndef CONFIG_SOFTMMU
6768
                    || (flags & HF_SOFTMMU_MASK)
6769
#endif
6770
                    );
6771
#if 0
6772
    /* check addseg logic */
6773
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
6774
        printf("ERROR addseg\n");
6775
#endif
6776

    
6777
    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
6778

    
6779
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
6780

    
6781
    dc->is_jmp = DISAS_NEXT;
6782
    pc_ptr = pc_start;
6783
    lj = -1;
6784

    
6785
    for(;;) {
6786
        if (env->nb_breakpoints > 0) {
6787
            for(j = 0; j < env->nb_breakpoints; j++) {
6788
                if (env->breakpoints[j] == pc_ptr) {
6789
                    gen_debug(dc, pc_ptr - dc->cs_base);
6790
                    break;
6791
                }
6792
            }
6793
        }
6794
        if (search_pc) {
6795
            j = gen_opc_ptr - gen_opc_buf;
6796
            if (lj < j) {
6797
                lj++;
6798
                while (lj < j)
6799
                    gen_opc_instr_start[lj++] = 0;
6800
            }
6801
            gen_opc_pc[lj] = pc_ptr;
6802
            gen_opc_cc_op[lj] = dc->cc_op;
6803
            gen_opc_instr_start[lj] = 1;
6804
        }
6805
        pc_ptr = disas_insn(dc, pc_ptr);
6806
        /* stop translation if indicated */
6807
        if (dc->is_jmp)
6808
            break;
6809
        /* if single step mode, we generate only one instruction and
6810
           generate an exception */
6811
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
6812
           the flag and abort the translation to give the irqs a
6813
           change to be happen */
6814
        if (dc->tf || dc->singlestep_enabled ||
6815
            (flags & HF_INHIBIT_IRQ_MASK) ||
6816
            (cflags & CF_SINGLE_INSN)) {
6817
            gen_jmp_im(pc_ptr - dc->cs_base);
6818
            gen_eob(dc);
6819
            break;
6820
        }
6821
        /* if too long translation, stop generation too */
6822
        if (gen_opc_ptr >= gen_opc_end ||
6823
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
6824
            gen_jmp_im(pc_ptr - dc->cs_base);
6825
            gen_eob(dc);
6826
            break;
6827
        }
6828
    }
6829
    *gen_opc_ptr = INDEX_op_end;
6830
    /* we don't forget to fill the last values */
6831
    if (search_pc) {
6832
        j = gen_opc_ptr - gen_opc_buf;
6833
        lj++;
6834
        while (lj <= j)
6835
            gen_opc_instr_start[lj++] = 0;
6836
    }
6837

    
6838
#ifdef DEBUG_DISAS
6839
    if (loglevel & CPU_LOG_TB_CPU) {
6840
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
6841
    }
6842
    if (loglevel & CPU_LOG_TB_IN_ASM) {
6843
        int disas_flags;
6844
        fprintf(logfile, "----------------\n");
6845
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
6846
#ifdef TARGET_X86_64
6847
        if (dc->code64)
6848
            disas_flags = 2;
6849
        else
6850
#endif
6851
            disas_flags = !dc->code32;
6852
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
6853
        fprintf(logfile, "\n");
6854
        if (loglevel & CPU_LOG_TB_OP_OPT) {
6855
            fprintf(logfile, "OP before opt:\n");
6856
            tcg_dump_ops(&tcg_ctx, logfile);
6857
            fprintf(logfile, "\n");
6858
        }
6859
    }
6860
#endif
6861

    
6862
    /* optimize flag computations */
6863
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);
6864

    
6865
    if (!search_pc)
6866
        tb->size = pc_ptr - pc_start;
6867
    return 0;
6868
}
6869

    
6870
/* Translate basic block 'tb' without recording per-op PC info. */
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 0);
}
6874

    
6875
int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
6876
{
6877
    return gen_intermediate_code_internal(env, tb, 1);
6878
}
6879