Statistics
| Branch: | Revision:

root / target-i386 / translate.c @ 914178d3

History | View | Annotate | Download (234.9 kB)

1
/*
2
 *  i386 translation
3
 *
4
 *  Copyright (c) 2003 Fabrice Bellard
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, write to the Free Software
18
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
 */
20
#include <stdarg.h>
21
#include <stdlib.h>
22
#include <stdio.h>
23
#include <string.h>
24
#include <inttypes.h>
25
#include <signal.h>
26
#include <assert.h>
27

    
28
#include "cpu.h"
29
#include "exec-all.h"
30
#include "disas.h"
31
#include "helper.h"
32
#include "tcg-op.h"
33

    
34
/* Instruction prefix bits, accumulated in DisasContext.prefix while decoding. */
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10

#ifdef TARGET_X86_64
/* Helpers that exist only when translating for an x86-64 target. */
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
/* On 32-bit targets the 64-bit-only variants compile away to constants. */
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif
57

    
58
//#define MACRO_TEST   1
59

    
60
/* global register indexes (live across the whole translation block) */
static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
/* local temps: T0/T1 are the generic operand temporaries of the translator */
static TCGv cpu_T[2], cpu_T3;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
static TCGv cpu_tmp5, cpu_tmp6;

#ifdef TARGET_X86_64
/* non-zero when a REX prefix forces the new-style byte registers
   (SPL/BPL/SIL/DIL) instead of AH/CH/DH/BH -- see gen_op_mov_reg_v() */
static int x86_64_hregs;
#endif
71

    
72
/* Per-translation-block decoder state: everything the x86 front end needs
   while turning one guest basic block into TCG ops. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* segment override register index; -1 if no override */
    int prefix;   /* PREFIX_* bits seen on the current instruction */
    int aflag, dflag; /* address / operand size flags (0=16, 1=32, 2=64 bit) */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* REX prefix index/base extension bits */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation (CC_OP_*; CC_OP_DYNAMIC = unknown) */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;    /* current privilege level */
    int iopl;   /* I/O privilege level from EFLAGS */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;
    int cpuid_ext_features;
    int cpuid_ext2_features;
    int cpuid_ext3_features;
} DisasContext;
109

    
110
static void gen_eob(DisasContext *s);
111
static void gen_jmp(DisasContext *s, target_ulong eip);
112
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
113

    
114
/* i386 arith/logic operations, in ModRM /r (reg field) encoding order */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};
125

    
126
/* i386 shift ops, in group-2 /r encoding order */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};
137

    
138
/* Jcc condition codes, matching bits 3..1 of the conditional-jump opcodes
   (bit 0 inverts the condition). */
enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};
148

    
149
/* operand size: 8/16/32/64-bit respectively */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};
156

    
157
enum {
    /* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    /* pseudo-register indexes, deliberately above the 16 x86-64 GPRs */
    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
172

    
173
/* Emit: T0 = 0 */
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}
177

    
178
/* Emit: T0 = val (sign-extended 32-bit immediate) */
static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}
182

    
183
/* Emit: T0 = val (zero-extended 32-bit immediate) */
static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}
187

    
188
/* Emit: T1 = val (sign-extended 32-bit immediate) */
static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
192

    
193
/* Emit: T1 = val (zero-extended 32-bit immediate) */
static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
197

    
198
/* Emit: A0 (address temp) = val (32-bit immediate) */
static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
202

    
203
#ifdef TARGET_X86_64
204
/* Emit: A0 = val (full 64-bit immediate, x86-64 only) */
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
208
#endif
209

    
210
/* Emit: T0 = val (target-width immediate) */
static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}
214

    
215
/* Emit: T1 = val (target-width immediate) */
static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
219

    
220
/* Emit: T0 &= 0xffff (truncate to 16-bit operand) */
static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}
224

    
225
/* Emit: T0 &= val */
static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}
229

    
230
/* Emit: T0 = T1 */
static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}
234

    
235
/* Emit: A0 &= 0xffff (16-bit addressing wrap) */
static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
239

    
240
#ifdef TARGET_X86_64

/* number of entries in per-operand-size tables (byte/word/long/quad) */
#define NB_OP_SIZES 4

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#endif /* !TARGET_X86_64 */

/* Byte offsets of the sub-register views (AL/AH/AX/EAX-style) inside a
   target_ulong CPU register slot, accounting for host endianness. */
#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
263

    
264
/* Store TCG value t0 into guest register 'reg' with operand size 'ot'.
   For OT_BYTE, regs 4..7 without REX map to the high bytes AH/CH/DH/BH
   of regs 0..3 (legacy encoding); with a REX prefix (x86_64_hregs) they
   address SPL/BPL/SIL/DIL instead.  On x86-64, a 32-bit write zeroes
   the high half of the 64-bit register, as the architecture requires. */
static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            /* high-byte register: low byte of t0 goes to byte 1 of reg-4 */
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
296

    
297
/* Store T0 into guest register 'reg' (size 'ot') */
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_v(ot, reg, cpu_T[0]);
}
301

    
302
/* Store T1 into guest register 'reg' (size 'ot') */
static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_v(ot, reg, cpu_T[1]);
}
306

    
307
/* Store the address temp A0 into guest register 'reg'.
   'size' uses the address-size convention (0=16, 1=32, 2=64 bit),
   unlike the OT_* operand sizes.  32-bit writes on x86-64 clear the
   upper half of the register. */
static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
332

    
333
/* Load guest register 'reg' (size 'ot') into TCG value t0.
   Only the OT_BYTE high-byte case (AH/CH/DH/BH without REX) needs a
   special offset; everything else loads the full slot and lets callers
   mask/extend as required. */
static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            /* legacy high-byte register: byte 1 of regs[reg - 4] */
            tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}
349

    
350
/* Load guest register 'reg' into T0 or T1 (selected by t_index) */
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
}
354

    
355
/* Emit: A0 = low 32 bits of guest register 'reg' (zero-extended) */
static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}
359

    
360
/* Emit: A0 = (A0 + val) truncated to 32 bits (32-bit addressing) */
static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    /* keep only the 32-bit effective address on 64-bit targets */
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
367

    
368
#ifdef TARGET_X86_64
369
/* Emit: A0 += val, full 64-bit arithmetic (x86-64 only) */
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
373
#endif
374
    
375
/* Emit: A0 += val, using 64-bit arithmetic in 64-bit code segments and
   32-bit-truncating arithmetic otherwise. */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
384

    
385
/* Emit: T0 = T0 + T1 */
static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}
389

    
390
/* Emit: env->eip = T0 (indirect jump target) */
static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}
394

    
395
/* Emit: regs[reg] += val with address-size semantics:
   size 0 writes back only the low 16 bits, size 1 truncates to 32 bits
   (clearing the high half on x86-64), size 2 is full 64-bit. */
static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
        /* 32-bit result: zero the upper half, as the architecture requires */
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}
420

    
421
/* Emit: regs[reg] += T0, with the same per-size write-back rules as
   gen_op_add_reg_im() (16-bit partial store / 32-bit truncation / 64-bit). */
static inline void gen_op_add_reg_T0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
#ifdef TARGET_X86_64
        /* 32-bit result: clear the upper half */
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}
446

    
447
/* Emit: cc_op = val (record which operation last set the flags) */
static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}
451

    
452
/* Emit: A0 = (A0 + (regs[reg] << shift)) truncated to 32 bits
   (scaled-index addition for 32-bit effective addresses). */
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
462

    
463
/* Emit: A0 = low 32 bits of segment base of 'reg' */
static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}
467

    
468
/* Emit: A0 = (A0 + segment base of 'reg') truncated to 32 bits */
static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
476

    
477
#ifdef TARGET_X86_64
478
/* Emit: A0 = full 64-bit segment base of 'reg' (x86-64 only) */
static inline void gen_op_movq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}
482

    
483
/* Emit: A0 += segment base of 'reg', 64-bit arithmetic (x86-64 only) */
static inline void gen_op_addq_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
488

    
489
/* Emit: A0 = full 64-bit guest register 'reg' (x86-64 only) */
static inline void gen_op_movq_A0_reg(int reg)
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}
493

    
494
/* Emit: A0 += regs[reg] << shift, 64-bit arithmetic (x86-64 only) */
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
501
#endif
502

    
503
static inline void gen_op_lds_T0_A0(int idx)
504
{
505
    int mem_index = (idx >> 2) - 1;
506
    switch(idx & 3) {
507
    case 0:
508
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
509
        break;
510
    case 1:
511
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
512
        break;
513
    default:
514
    case 2:
515
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
516
        break;
517
    }
518
}
519

    
520
/* Emit a zero-extending load of t0 from address a0.
   'idx' packs the operand size in its low 2 bits (0=8, 1=16, 2=32,
   else 64 bit) and the memory index in the upper bits. */
static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
{
    int mi = (idx >> 2) - 1;
    int sz = idx & 3;

    if (sz == 0) {
        tcg_gen_qemu_ld8u(t0, a0, mi);
    } else if (sz == 1) {
        tcg_gen_qemu_ld16u(t0, a0, mi);
    } else if (sz == 2) {
        tcg_gen_qemu_ld32u(t0, a0, mi);
    } else {
        /* sz == 3 and any other value: 64-bit load */
        tcg_gen_qemu_ld64(t0, a0, mi);
    }
}
539

    
540
/* XXX: always use ldu or lds */
/* Emit: T0 = zero-extended load from [A0] (size/mem-index packed in idx) */
static inline void gen_op_ld_T0_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}
545

    
546
/* Emit: T0 = zero-extended (unsigned) load from [A0] */
static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}
550

    
551
/* Emit: T1 = zero-extended load from [A0] */
static inline void gen_op_ld_T1_A0(int idx)
{
    gen_op_ld_v(idx, cpu_T[1], cpu_A0);
}
555

    
556
/* Emit a store of t0 to address a0.
   'idx' packs the operand size in its low 2 bits (0=8, 1=16, 2=32,
   else 64 bit) and the memory index in the upper bits. */
static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
{
    int mi = (idx >> 2) - 1;
    int sz = idx & 3;

    if (sz == 0) {
        tcg_gen_qemu_st8(t0, a0, mi);
    } else if (sz == 1) {
        tcg_gen_qemu_st16(t0, a0, mi);
    } else if (sz == 2) {
        tcg_gen_qemu_st32(t0, a0, mi);
    } else {
        /* sz == 3 and any other value: 64-bit store */
        tcg_gen_qemu_st64(t0, a0, mi);
    }
}
575

    
576
/* Emit: store T0 to [A0] (size/mem-index packed in idx) */
static inline void gen_op_st_T0_A0(int idx)
{
    gen_op_st_v(idx, cpu_T[0], cpu_A0);
}
580

    
581
/* Emit: store T1 to [A0] */
static inline void gen_op_st_T1_A0(int idx)
{
    gen_op_st_v(idx, cpu_T[1], cpu_A0);
}
585

    
586
/* Emit: env->eip = pc (immediate jump target / state sync before helpers) */
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
591

    
592
/* Compute the source address of a string instruction into A0:
   A0 = seg_base + (R)ESI, honouring the current address size and any
   segment override (DS is the default when a segment must be added). */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64-bit address: segment base only added for an explicit override */
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}
626

    
627
/* Compute the destination address of a string instruction into A0:
   A0 = ES base + (R)EDI.  The destination segment of string ops is
   always ES and cannot be overridden. */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        /* 16-bit addressing: wrap EDI to 16 bits and add the ES base */
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
647

    
648
static inline void gen_op_movl_T0_Dshift(int ot) 
649
{
650
    tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
651
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
652
};
653

    
654
/* Zero-extend 'reg' in place from the width selected by 'ot'.
   OT_QUAD (and any other value) leaves the value untouched. */
static void gen_extu(int ot, TCGv reg)
{
    if (ot == OT_BYTE) {
        tcg_gen_ext8u_tl(reg, reg);
    } else if (ot == OT_WORD) {
        tcg_gen_ext16u_tl(reg, reg);
    } else if (ot == OT_LONG) {
        tcg_gen_ext32u_tl(reg, reg);
    }
    /* other sizes: nothing to do */
}
670

    
671
/* Sign-extend 'reg' in place from the width selected by 'ot'.
   OT_QUAD (and any other value) leaves the value untouched. */
static void gen_exts(int ot, TCGv reg)
{
    if (ot == OT_BYTE) {
        tcg_gen_ext8s_tl(reg, reg);
    } else if (ot == OT_WORD) {
        tcg_gen_ext16s_tl(reg, reg);
    } else if (ot == OT_LONG) {
        tcg_gen_ext32s_tl(reg, reg);
    }
    /* other sizes: nothing to do */
}
687

    
688
/* Branch to label1 if (R)ECX != 0, masked to the current address size
   ('size' is 0/1/2 for 16/32/64-bit; gen_extu gets size+1 = OT_*). */
static inline void gen_op_jnz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
}
694

    
695
/* Branch to label1 if (R)ECX == 0, masked to the current address size */
static inline void gen_op_jz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
}
701

    
702
/* IN helpers, indexed by operand size (OT_BYTE/OT_WORD/OT_LONG) */
static void *helper_in_func[3] = {
    helper_inb,
    helper_inw,
    helper_inl,
};
707

    
708
/* OUT helpers, indexed by operand size (OT_BYTE/OT_WORD/OT_LONG) */
static void *helper_out_func[3] = {
    helper_outb,
    helper_outw,
    helper_outl,
};
713

    
714
/* I/O-permission-check helpers, indexed by operand size */
static void *gen_check_io_func[3] = {
    helper_check_iob,
    helper_check_iow,
    helper_check_iol,
};
719

    
720
/* Emit the permission checks required before an I/O instruction whose port
   is in T0: a TSS I/O-bitmap check when CPL > IOPL (or in vm86 mode), and
   an SVM I/O-intercept check when running under a hypervisor.  CPU state
   (cc_op, eip) is synced first so the helpers can raise exceptions. */
static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        /* privilege check needed: sync flags/eip, then call the helper */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
                           cpu_tmp2_i32);
    }
    if(s->flags & HF_SVMI_MASK) {
        /* SVM intercept check; avoid re-syncing state if already done */
        if (!state_saved) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(cur_eip);
            state_saved = 1;
        }
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           cpu_tmp2_i32,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));
    }
}
752

    
753
/* Emit one MOVS iteration: load from [seg:(R)ESI], store to [ES:(R)EDI],
   then advance both index registers by the direction-flag step. */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
763

    
764
/* Flush the statically-known cc_op into the cc_op register and mark the
   decoder state as dynamic (unknown at translation time). */
static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}
771

    
772
/* Flag update for single-operand results: cc_dst = T0, cc_src unused */
static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
777

    
778
/* Flag update for two-operand results: cc_src = T1, cc_dst = T0 */
static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
783

    
784
/* Flag update for CMP: cc_src = T1, cc_dst = T0 - T1 (SUB-style flags) */
static inline void gen_op_cmpl_T0_T1_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}
789

    
790
/* Flag update for TEST: cc_dst = T0 & T1 (logic-style flags) */
static inline void gen_op_testl_T0_T1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}
795

    
796
/* Flag update for NEG: cc_src = -T0 (the original operand), cc_dst = T0 */
static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
801

    
802
/* compute eflags.C to reg: load the compute_c function pointer from
   cc_table[cc_op] and emit an indirect call to it; the i32 result is
   zero-extended into 'reg'.  The shift (3 vs 4) presumably matches
   sizeof(CCTable) on 32- vs 64-bit hosts -- confirm against cpu.h. */
static void gen_compute_eflags_c(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
823

    
824
/* compute all eflags into 'reg' (NOTE: the old comment said "to cc_src",
   but the result is written to the reg argument): load the compute_all
   function pointer from cc_table[cc_op] and emit an indirect call;
   the i32 result is zero-extended into 'reg'. */
static void gen_compute_eflags(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
845

    
846
/* Slow-path SETcc: compute the full EFLAGS, then reduce the condition
   'jcc_op' (JCC_*) to a 0/1 value in T0 by shifting the relevant flag
   bit(s) down and masking.  EFLAGS bit positions used: CF=0, PF=2,
   ZF=6, SF=7, OF=11. */
static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    switch(jcc_op) {
    case JCC_O:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_B:
        /* below == carry flag, which has its own fast computation */
        gen_compute_eflags_c(cpu_T[0]);
        break;
    case JCC_Z:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_BE:
        /* CF | ZF: flags kept in cpu_tmp0 so CF (bit 0) survives the shift */
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_S:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_P:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_L:
        /* less == SF != OF */
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    default:
    case JCC_LE:
        /* less-or-equal == (SF != OF) | ZF */
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    }
}
899

    
900
/* return true if setcc_slow is not needed (WARNING: must be kept in
   sync with gen_jcc1) */
static int is_fast_jcc_case(DisasContext *s, int b)
{
    int jcc_op;
    jcc_op = (b >> 1) & 7;  /* condition code; bit 0 of b is the invert bit */
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        /* after SUB/CMP every condition is fast except OF and PF */
        if (jcc_op == JCC_O || jcc_op == JCC_P)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        /* for these ops only ZF and SF tests read cc_dst directly */
        if (jcc_op != JCC_Z && jcc_op != JCC_S)
            goto slow_jcc;
        break;
    default:
    slow_jcc:
        return 0;
    }
    return 1;
}
950

    
951
/* generate a conditional jump to label 'l1' according to jump opcode
952
   value 'b'. In the fast case, T0 is guaranted not to be used. */
953
static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
954
{
955
    int inv, jcc_op, size, cond;
956
    TCGv t0;
957

    
958
    inv = b & 1;
959
    jcc_op = (b >> 1) & 7;
960

    
961
    switch(cc_op) {
962
        /* we optimize the cmp/jcc case */
963
    case CC_OP_SUBB:
964
    case CC_OP_SUBW:
965
    case CC_OP_SUBL:
966
    case CC_OP_SUBQ:
967
        
968
        size = cc_op - CC_OP_SUBB;
969
        switch(jcc_op) {
970
        case JCC_Z:
971
        fast_jcc_z:
972
            switch(size) {
973
            case 0:
974
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
975
                t0 = cpu_tmp0;
976
                break;
977
            case 1:
978
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
979
                t0 = cpu_tmp0;
980
                break;
981
#ifdef TARGET_X86_64
982
            case 2:
983
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
984
                t0 = cpu_tmp0;
985
                break;
986
#endif
987
            default:
988
                t0 = cpu_cc_dst;
989
                break;
990
            }
991
            tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
992
            break;
993
        case JCC_S:
994
        fast_jcc_s:
995
            switch(size) {
996
            case 0:
997
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
998
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0, 
999
                                   0, l1);
1000
                break;
1001
            case 1:
1002
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
1003
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0, 
1004
                                   0, l1);
1005
                break;
1006
#ifdef TARGET_X86_64
1007
            case 2:
1008
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
1009
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0, 
1010
                                   0, l1);
1011
                break;
1012
#endif
1013
            default:
1014
                tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst, 
1015
                                   0, l1);
1016
                break;
1017
            }
1018
            break;
1019
            
1020
        case JCC_B:
1021
            cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
1022
            goto fast_jcc_b;
1023
        case JCC_BE:
1024
            cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
1025
        fast_jcc_b:
1026
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1027
            switch(size) {
1028
            case 0:
1029
                t0 = cpu_tmp0;
1030
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
1031
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
1032
                break;
1033
            case 1:
1034
                t0 = cpu_tmp0;
1035
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
1036
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
1037
                break;
1038
#ifdef TARGET_X86_64
1039
            case 2:
1040
                t0 = cpu_tmp0;
1041
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
1042
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
1043
                break;
1044
#endif
1045
            default:
1046
                t0 = cpu_cc_src;
1047
                break;
1048
            }
1049
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1050
            break;
1051
            
1052
        case JCC_L:
1053
            cond = inv ? TCG_COND_GE : TCG_COND_LT;
1054
            goto fast_jcc_l;
1055
        case JCC_LE:
1056
            cond = inv ? TCG_COND_GT : TCG_COND_LE;
1057
        fast_jcc_l:
1058
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1059
            switch(size) {
1060
            case 0:
1061
                t0 = cpu_tmp0;
1062
                tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
1063
                tcg_gen_ext8s_tl(t0, cpu_cc_src);
1064
                break;
1065
            case 1:
1066
                t0 = cpu_tmp0;
1067
                tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
1068
                tcg_gen_ext16s_tl(t0, cpu_cc_src);
1069
                break;
1070
#ifdef TARGET_X86_64
1071
            case 2:
1072
                t0 = cpu_tmp0;
1073
                tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
1074
                tcg_gen_ext32s_tl(t0, cpu_cc_src);
1075
                break;
1076
#endif
1077
            default:
1078
                t0 = cpu_cc_src;
1079
                break;
1080
            }
1081
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1082
            break;
1083
            
1084
        default:
1085
            goto slow_jcc;
1086
        }
1087
        break;
1088
        
1089
        /* some jumps are easy to compute */
1090
    case CC_OP_ADDB:
1091
    case CC_OP_ADDW:
1092
    case CC_OP_ADDL:
1093
    case CC_OP_ADDQ:
1094
        
1095
    case CC_OP_ADCB:
1096
    case CC_OP_ADCW:
1097
    case CC_OP_ADCL:
1098
    case CC_OP_ADCQ:
1099
        
1100
    case CC_OP_SBBB:
1101
    case CC_OP_SBBW:
1102
    case CC_OP_SBBL:
1103
    case CC_OP_SBBQ:
1104
        
1105
    case CC_OP_LOGICB:
1106
    case CC_OP_LOGICW:
1107
    case CC_OP_LOGICL:
1108
    case CC_OP_LOGICQ:
1109
        
1110
    case CC_OP_INCB:
1111
    case CC_OP_INCW:
1112
    case CC_OP_INCL:
1113
    case CC_OP_INCQ:
1114
        
1115
    case CC_OP_DECB:
1116
    case CC_OP_DECW:
1117
    case CC_OP_DECL:
1118
    case CC_OP_DECQ:
1119
        
1120
    case CC_OP_SHLB:
1121
    case CC_OP_SHLW:
1122
    case CC_OP_SHLL:
1123
    case CC_OP_SHLQ:
1124
        
1125
    case CC_OP_SARB:
1126
    case CC_OP_SARW:
1127
    case CC_OP_SARL:
1128
    case CC_OP_SARQ:
1129
        switch(jcc_op) {
1130
        case JCC_Z:
1131
            size = (cc_op - CC_OP_ADDB) & 3;
1132
            goto fast_jcc_z;
1133
        case JCC_S:
1134
            size = (cc_op - CC_OP_ADDB) & 3;
1135
            goto fast_jcc_s;
1136
        default:
1137
            goto slow_jcc;
1138
        }
1139
        break;
1140
    default:
1141
    slow_jcc:
1142
        gen_setcc_slow_T0(s, jcc_op);
1143
        tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, 
1144
                           cpu_T[0], 0, l1);
1145
        break;
1146
    }
1147
}
1148

    
1149
/* XXX: does not work with gdbstub "ice" single step - not a
1150
   serious problem */
1151
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
1152
{
1153
    int l1, l2;
1154

    
1155
    l1 = gen_new_label();
1156
    l2 = gen_new_label();
1157
    gen_op_jnz_ecx(s->aflag, l1);
1158
    gen_set_label(l2);
1159
    gen_jmp_tb(s, next_eip, 1);
1160
    gen_set_label(l1);
1161
    return l2;
1162
}
1163

    
1164
/* Emit one STOS step: store EAX (operand size 'ot') at the string
   address derived from EDI, then advance EDI by the direction-flag
   dependent shift. */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
1172

    
1173
/* Emit one LODS step: load a value of size 'ot' from the string
   address derived from ESI into EAX, then advance ESI by the
   direction-flag dependent shift. */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}
1181

    
1182
/* Emit one SCAS step: compare EAX against the memory operand at the
   EDI string address (setting the CC state via the compare), then
   advance EDI. */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
1191

    
1192
/* Emit one CMPS step: load operands from the ESI and EDI string
   addresses, compare them (updating the CC state), then advance both
   ESI and EDI. */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
1203

    
1204
/* Emit one INS step: read from the I/O port in DX (low 16 bits) via
   the per-size helper and store the result at the EDI string address,
   then advance EDI. */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
1219

    
1220
/* Emit one OUTS step: load a value from the ESI string address and
   write it to the I/O port in DX (low 16 bits) via the per-size
   helper, then advance ESI. */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}
1234

    
1235
/* same method as Valgrind : we generate jumps to current or next
   instruction */
/* Expand a REP wrapper around a string op: exit when ECX is zero,
   perform one iteration, decrement ECX, and loop back to the current
   instruction (or, when single-stepping, jump out via l2). */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
1252

    
1253
/* Like GEN_REPZ, but for string ops that also test ZF (SCAS/CMPS):
   'nz' selects REPNZ (loop while ZF clear) vs REPZ (loop while ZF
   set); the loop additionally exits via the JCC_Z test on the compare
   result. */
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2);                \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
1270

    
1271
GEN_REPZ(movs)
1272
GEN_REPZ(stos)
1273
GEN_REPZ(lods)
1274
GEN_REPZ(ins)
1275
GEN_REPZ(outs)
1276
GEN_REPZ2(scas)
1277
GEN_REPZ2(cmps)
1278

    
1279
static void *helper_fp_arith_ST0_FT0[8] = {
1280
    helper_fadd_ST0_FT0,
1281
    helper_fmul_ST0_FT0,
1282
    helper_fcom_ST0_FT0,
1283
    helper_fcom_ST0_FT0,
1284
    helper_fsub_ST0_FT0,
1285
    helper_fsubr_ST0_FT0,
1286
    helper_fdiv_ST0_FT0,
1287
    helper_fdivr_ST0_FT0,
1288
};
1289

    
1290
/* NOTE the exception in "r" op ordering */
1291
static void *helper_fp_arith_STN_ST0[8] = {
1292
    helper_fadd_STN_ST0,
1293
    helper_fmul_STN_ST0,
1294
    NULL,
1295
    NULL,
1296
    helper_fsubr_STN_ST0,
1297
    helper_fsub_STN_ST0,
1298
    helper_fdivr_STN_ST0,
1299
    helper_fdiv_STN_ST0,
1300
};
1301

    
1302
/* if d == OR_TMP0, it means memory operand (address in A0) */
1303
static void gen_op(DisasContext *s1, int op, int ot, int d)
1304
{
1305
    if (d != OR_TMP0) {
1306
        gen_op_mov_TN_reg(ot, 0, d);
1307
    } else {
1308
        gen_op_ld_T0_A0(ot + s1->mem_index);
1309
    }
1310
    switch(op) {
1311
    case OP_ADCL:
1312
        if (s1->cc_op != CC_OP_DYNAMIC)
1313
            gen_op_set_cc_op(s1->cc_op);
1314
        gen_compute_eflags_c(cpu_tmp4);
1315
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1316
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1317
        if (d != OR_TMP0)
1318
            gen_op_mov_reg_T0(ot, d);
1319
        else
1320
            gen_op_st_T0_A0(ot + s1->mem_index);
1321
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1322
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1323
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1324
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1325
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
1326
        s1->cc_op = CC_OP_DYNAMIC;
1327
        break;
1328
    case OP_SBBL:
1329
        if (s1->cc_op != CC_OP_DYNAMIC)
1330
            gen_op_set_cc_op(s1->cc_op);
1331
        gen_compute_eflags_c(cpu_tmp4);
1332
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1333
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1334
        if (d != OR_TMP0)
1335
            gen_op_mov_reg_T0(ot, d);
1336
        else
1337
            gen_op_st_T0_A0(ot + s1->mem_index);
1338
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1339
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1340
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1341
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1342
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
1343
        s1->cc_op = CC_OP_DYNAMIC;
1344
        break;
1345
    case OP_ADDL:
1346
        gen_op_addl_T0_T1();
1347
        if (d != OR_TMP0)
1348
            gen_op_mov_reg_T0(ot, d);
1349
        else
1350
            gen_op_st_T0_A0(ot + s1->mem_index);
1351
        gen_op_update2_cc();
1352
        s1->cc_op = CC_OP_ADDB + ot;
1353
        break;
1354
    case OP_SUBL:
1355
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1356
        if (d != OR_TMP0)
1357
            gen_op_mov_reg_T0(ot, d);
1358
        else
1359
            gen_op_st_T0_A0(ot + s1->mem_index);
1360
        gen_op_update2_cc();
1361
        s1->cc_op = CC_OP_SUBB + ot;
1362
        break;
1363
    default:
1364
    case OP_ANDL:
1365
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1366
        if (d != OR_TMP0)
1367
            gen_op_mov_reg_T0(ot, d);
1368
        else
1369
            gen_op_st_T0_A0(ot + s1->mem_index);
1370
        gen_op_update1_cc();
1371
        s1->cc_op = CC_OP_LOGICB + ot;
1372
        break;
1373
    case OP_ORL:
1374
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1375
        if (d != OR_TMP0)
1376
            gen_op_mov_reg_T0(ot, d);
1377
        else
1378
            gen_op_st_T0_A0(ot + s1->mem_index);
1379
        gen_op_update1_cc();
1380
        s1->cc_op = CC_OP_LOGICB + ot;
1381
        break;
1382
    case OP_XORL:
1383
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1384
        if (d != OR_TMP0)
1385
            gen_op_mov_reg_T0(ot, d);
1386
        else
1387
            gen_op_st_T0_A0(ot + s1->mem_index);
1388
        gen_op_update1_cc();
1389
        s1->cc_op = CC_OP_LOGICB + ot;
1390
        break;
1391
    case OP_CMPL:
1392
        gen_op_cmpl_T0_T1_cc();
1393
        s1->cc_op = CC_OP_SUBB + ot;
1394
        break;
1395
    }
1396
}
1397

    
1398
/* if d == OR_TMP0, it means memory operand (address in A0) */
1399
static void gen_inc(DisasContext *s1, int ot, int d, int c)
1400
{
1401
    if (d != OR_TMP0)
1402
        gen_op_mov_TN_reg(ot, 0, d);
1403
    else
1404
        gen_op_ld_T0_A0(ot + s1->mem_index);
1405
    if (s1->cc_op != CC_OP_DYNAMIC)
1406
        gen_op_set_cc_op(s1->cc_op);
1407
    if (c > 0) {
1408
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
1409
        s1->cc_op = CC_OP_INCB + ot;
1410
    } else {
1411
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
1412
        s1->cc_op = CC_OP_DECB + ot;
1413
    }
1414
    if (d != OR_TMP0)
1415
        gen_op_mov_reg_T0(ot, d);
1416
    else
1417
        gen_op_st_T0_A0(ot + s1->mem_index);
1418
    gen_compute_eflags_c(cpu_cc_src);
1419
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1420
}
1421

    
1422
/* Emit code for SHL/SHR/SAR of size 'ot' on operand 'op1' with a
   variable shift count in T1.  'is_right' selects right shifts,
   'is_arith' selects SAR over SHR.  Flags are only updated when the
   (masked) count is non-zero, so cc_op becomes dynamic. */
static void gen_shift_rm_T1(DisasContext *s, int ot, int op1, 
                            int is_right, int is_arith)
{
    target_ulong mask;
    int shift_label;
    TCGv t0, t1;

    /* hardware masks the count to 6 bits for 64-bit ops, 5 otherwise */
    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    /* shift by count-1 kept in T3: its low/high bit becomes CF */
    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);

    if (is_right) {
        if (is_arith) {
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);
        
    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* XXX: inefficient */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);

    tcg_gen_mov_tl(t0, cpu_T[0]);
    tcg_gen_mov_tl(t1, cpu_T3);

    shift_label = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);

    /* count != 0: commit result and count-1 value to the cc state */
    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
        
    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
1492

    
1493
/* Emit code for SHL/SHR/SAR of size 'ot' on operand 'op1' with an
   immediate count 'op2'.  Since the count is known at translation
   time, the zero-count case (no flag update) is resolved statically. */
static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
                            int is_right, int is_arith)
{
    int mask;
    
    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    op2 &= mask;
    if (op2 != 0) {
        if (is_right) {
            if (is_arith) {
                gen_exts(ot, cpu_T[0]);
                /* tmp4 = value shifted by count-1, for CF extraction */
                tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
            } else {
                gen_extu(ot, cpu_T[0]);
                tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
            }
        } else {
            tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
            tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
        }
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);
        
    /* update eflags if non zero shift */
    if (op2 != 0) {
        tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        if (is_right)
            s->cc_op = CC_OP_SARB + ot;
        else
            s->cc_op = CC_OP_SHLB + ot;
    }
}
1543

    
1544
/* Logical shift by a signed immediate: left for arg2 >= 0, right by
   -arg2 otherwise. */
static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
{
    if (arg2 >= 0)
        tcg_gen_shli_tl(ret, arg1, arg2);
    else
        tcg_gen_shri_tl(ret, arg1, -arg2);
}
1551

    
1552
/* XXX: add faster immediate case */
1553
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1, 
1554
                          int is_right)
1555
{
1556
    target_ulong mask;
1557
    int label1, label2, data_bits;
1558
    TCGv t0, t1, t2, a0;
1559

    
1560
    /* XXX: inefficient, but we must use local temps */
1561
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
1562
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
1563
    t2 = tcg_temp_local_new(TCG_TYPE_TL);
1564
    a0 = tcg_temp_local_new(TCG_TYPE_TL);
1565

    
1566
    if (ot == OT_QUAD)
1567
        mask = 0x3f;
1568
    else
1569
        mask = 0x1f;
1570

    
1571
    /* load */
1572
    if (op1 == OR_TMP0) {
1573
        tcg_gen_mov_tl(a0, cpu_A0);
1574
        gen_op_ld_v(ot + s->mem_index, t0, a0);
1575
    } else {
1576
        gen_op_mov_v_reg(ot, t0, op1);
1577
    }
1578

    
1579
    tcg_gen_mov_tl(t1, cpu_T[1]);
1580

    
1581
    tcg_gen_andi_tl(t1, t1, mask);
1582

    
1583
    /* Must test zero case to avoid using undefined behaviour in TCG
1584
       shifts. */
1585
    label1 = gen_new_label();
1586
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);
1587
    
1588
    if (ot <= OT_WORD)
1589
        tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
1590
    else
1591
        tcg_gen_mov_tl(cpu_tmp0, t1);
1592
    
1593
    gen_extu(ot, t0);
1594
    tcg_gen_mov_tl(t2, t0);
1595

    
1596
    data_bits = 8 << ot;
1597
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
1598
       fix TCG definition) */
1599
    if (is_right) {
1600
        tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
1601
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
1602
        tcg_gen_shl_tl(t0, t0, cpu_tmp0);
1603
    } else {
1604
        tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
1605
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
1606
        tcg_gen_shr_tl(t0, t0, cpu_tmp0);
1607
    }
1608
    tcg_gen_or_tl(t0, t0, cpu_tmp4);
1609

    
1610
    gen_set_label(label1);
1611
    /* store */
1612
    if (op1 == OR_TMP0) {
1613
        gen_op_st_v(ot + s->mem_index, t0, a0);
1614
    } else {
1615
        gen_op_mov_reg_v(ot, op1, t0);
1616
    }
1617
    
1618
    /* update eflags */
1619
    if (s->cc_op != CC_OP_DYNAMIC)
1620
        gen_op_set_cc_op(s->cc_op);
1621

    
1622
    label2 = gen_new_label();
1623
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);
1624

    
1625
    gen_compute_eflags(cpu_cc_src);
1626
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
1627
    tcg_gen_xor_tl(cpu_tmp0, t2, t0);
1628
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
1629
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
1630
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
1631
    if (is_right) {
1632
        tcg_gen_shri_tl(t0, t0, data_bits - 1);
1633
    }
1634
    tcg_gen_andi_tl(t0, t0, CC_C);
1635
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
1636
    
1637
    tcg_gen_discard_tl(cpu_cc_dst);
1638
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
1639
        
1640
    gen_set_label(label2);
1641
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1642

    
1643
    tcg_temp_free(t0);
1644
    tcg_temp_free(t1);
1645
    tcg_temp_free(t2);
1646
    tcg_temp_free(a0);
1647
}
1648

    
1649
static void *helper_rotc[8] = {
1650
    helper_rclb,
1651
    helper_rclw,
1652
    helper_rcll,
1653
    X86_64_ONLY(helper_rclq),
1654
    helper_rcrb,
1655
    helper_rcrw,
1656
    helper_rcrl,
1657
    X86_64_ONLY(helper_rcrq),
1658
};
1659

    
1660
/* XXX: add faster immediate = 1 case */
1661
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1, 
1662
                           int is_right)
1663
{
1664
    int label1;
1665

    
1666
    if (s->cc_op != CC_OP_DYNAMIC)
1667
        gen_op_set_cc_op(s->cc_op);
1668

    
1669
    /* load */
1670
    if (op1 == OR_TMP0)
1671
        gen_op_ld_T0_A0(ot + s->mem_index);
1672
    else
1673
        gen_op_mov_TN_reg(ot, 0, op1);
1674
    
1675
    tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
1676
                       cpu_T[0], cpu_T[0], cpu_T[1]);
1677
    /* store */
1678
    if (op1 == OR_TMP0)
1679
        gen_op_st_T0_A0(ot + s->mem_index);
1680
    else
1681
        gen_op_mov_reg_T0(ot, op1);
1682

    
1683
    /* update eflags */
1684
    label1 = gen_new_label();
1685
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);
1686

    
1687
    tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
1688
    tcg_gen_discard_tl(cpu_cc_dst);
1689
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
1690
        
1691
    gen_set_label(label1);
1692
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1693
}
1694

    
1695
/* XXX: add faster immediate case */
1696
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1, 
1697
                                int is_right)
1698
{
1699
    int label1, label2, data_bits;
1700
    target_ulong mask;
1701
    TCGv t0, t1, t2, a0;
1702

    
1703
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
1704
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
1705
    t2 = tcg_temp_local_new(TCG_TYPE_TL);
1706
    a0 = tcg_temp_local_new(TCG_TYPE_TL);
1707

    
1708
    if (ot == OT_QUAD)
1709
        mask = 0x3f;
1710
    else
1711
        mask = 0x1f;
1712

    
1713
    /* load */
1714
    if (op1 == OR_TMP0) {
1715
        tcg_gen_mov_tl(a0, cpu_A0);
1716
        gen_op_ld_v(ot + s->mem_index, t0, a0);
1717
    } else {
1718
        gen_op_mov_v_reg(ot, t0, op1);
1719
    }
1720

    
1721
    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
1722

    
1723
    tcg_gen_mov_tl(t1, cpu_T[1]);
1724
    tcg_gen_mov_tl(t2, cpu_T3);
1725

    
1726
    /* Must test zero case to avoid using undefined behaviour in TCG
1727
       shifts. */
1728
    label1 = gen_new_label();
1729
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
1730
    
1731
    tcg_gen_addi_tl(cpu_tmp5, t2, -1);
1732
    if (ot == OT_WORD) {
1733
        /* Note: we implement the Intel behaviour for shift count > 16 */
1734
        if (is_right) {
1735
            tcg_gen_andi_tl(t0, t0, 0xffff);
1736
            tcg_gen_shli_tl(cpu_tmp0, t1, 16);
1737
            tcg_gen_or_tl(t0, t0, cpu_tmp0);
1738
            tcg_gen_ext32u_tl(t0, t0);
1739

    
1740
            tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
1741
            
1742
            /* only needed if count > 16, but a test would complicate */
1743
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
1744
            tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);
1745

    
1746
            tcg_gen_shr_tl(t0, t0, t2);
1747

    
1748
            tcg_gen_or_tl(t0, t0, cpu_tmp0);
1749
        } else {
1750
            /* XXX: not optimal */
1751
            tcg_gen_andi_tl(t0, t0, 0xffff);
1752
            tcg_gen_shli_tl(t1, t1, 16);
1753
            tcg_gen_or_tl(t1, t1, t0);
1754
            tcg_gen_ext32u_tl(t1, t1);
1755
            
1756
            tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
1757
            tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
1758
            tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
1759
            tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);
1760

    
1761
            tcg_gen_shl_tl(t0, t0, t2);
1762
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
1763
            tcg_gen_shr_tl(t1, t1, cpu_tmp5);
1764
            tcg_gen_or_tl(t0, t0, t1);
1765
        }
1766
    } else {
1767
        data_bits = 8 << ot;
1768
        if (is_right) {
1769
            if (ot == OT_LONG)
1770
                tcg_gen_ext32u_tl(t0, t0);
1771

    
1772
            tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
1773

    
1774
            tcg_gen_shr_tl(t0, t0, t2);
1775
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
1776
            tcg_gen_shl_tl(t1, t1, cpu_tmp5);
1777
            tcg_gen_or_tl(t0, t0, t1);
1778
            
1779
        } else {
1780
            if (ot == OT_LONG)
1781
                tcg_gen_ext32u_tl(t1, t1);
1782

    
1783
            tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
1784
            
1785
            tcg_gen_shl_tl(t0, t0, t2);
1786
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
1787
            tcg_gen_shr_tl(t1, t1, cpu_tmp5);
1788
            tcg_gen_or_tl(t0, t0, t1);
1789
        }
1790
    }
1791
    tcg_gen_mov_tl(t1, cpu_tmp4);
1792

    
1793
    gen_set_label(label1);
1794
    /* store */
1795
    if (op1 == OR_TMP0) {
1796
        gen_op_st_v(ot + s->mem_index, t0, a0);
1797
    } else {
1798
        gen_op_mov_reg_v(ot, op1, t0);
1799
    }
1800
    
1801
    /* update eflags */
1802
    if (s->cc_op != CC_OP_DYNAMIC)
1803
        gen_op_set_cc_op(s->cc_op);
1804

    
1805
    label2 = gen_new_label();
1806
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);
1807

    
1808
    tcg_gen_mov_tl(cpu_cc_src, t1);
1809
    tcg_gen_mov_tl(cpu_cc_dst, t0);
1810
    if (is_right) {
1811
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1812
    } else {
1813
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1814
    }
1815
    gen_set_label(label2);
1816
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1817

    
1818
    tcg_temp_free(t0);
1819
    tcg_temp_free(t1);
1820
    tcg_temp_free(t2);
1821
    tcg_temp_free(a0);
1822
}
1823

    
1824
/* Dispatch a shift/rotate 'op' of size 'ot' on destination 'd' with
   the count taken from register 's' (or already in T1 when s ==
   OR_TMP1). */
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);
    switch(op) {
    case OP_ROL:
        gen_rot_rm_T1(s1, ot, d, 0);
        break;
    case OP_ROR:
        gen_rot_rm_T1(s1, ot, d, 1);
        break;
    case OP_SHL:
    case OP_SHL1:
        gen_shift_rm_T1(s1, ot, d, 0, 0);
        break;
    case OP_SHR:
        gen_shift_rm_T1(s1, ot, d, 1, 0);
        break;
    case OP_SAR:
        gen_shift_rm_T1(s1, ot, d, 1, 1);
        break;
    case OP_RCL:
        gen_rotc_rm_T1(s1, ot, d, 0);
        break;
    case OP_RCR:
        gen_rotc_rm_T1(s1, ot, d, 1);
        break;
    }
}
1853

    
1854
/* Dispatch a shift/rotate 'op' of size 'ot' on destination 'd' with
   an immediate count 'c'.  SHL/SHR/SAR use the fast immediate path;
   other ops load the count into T1 and fall back to gen_shift. */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    switch(op) {
    case OP_SHL:
    case OP_SHL1:
        gen_shift_rm_im(s1, ot, d, c, 0, 0);
        break;
    case OP_SHR:
        gen_shift_rm_im(s1, ot, d, c, 1, 0);
        break;
    case OP_SAR:
        gen_shift_rm_im(s1, ot, d, c, 1, 1);
        break;
    default:
        /* currently not optimized */
        gen_op_movl_T1_im(c);
        gen_shift(s1, op, ot, d, OR_TMP1);
        break;
    }
}
1874

    
1875
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
1876
{
1877
    target_long disp;
1878
    int havesib;
1879
    int base;
1880
    int index;
1881
    int scale;
1882
    int opreg;
1883
    int mod, rm, code, override, must_add_seg;
1884

    
1885
    override = s->override;
1886
    must_add_seg = s->addseg;
1887
    if (override >= 0)
1888
        must_add_seg = 1;
1889
    mod = (modrm >> 6) & 3;
1890
    rm = modrm & 7;
1891

    
1892
    if (s->aflag) {
1893

    
1894
        havesib = 0;
1895
        base = rm;
1896
        index = 0;
1897
        scale = 0;
1898

    
1899
        if (base == 4) {
1900
            havesib = 1;
1901
            code = ldub_code(s->pc++);
1902
            scale = (code >> 6) & 3;
1903
            index = ((code >> 3) & 7) | REX_X(s);
1904
            base = (code & 7);
1905
        }
1906
        base |= REX_B(s);
1907

    
1908
        switch (mod) {
1909
        case 0:
1910
            if ((base & 7) == 5) {
1911
                base = -1;
1912
                disp = (int32_t)ldl_code(s->pc);
1913
                s->pc += 4;
1914
                if (CODE64(s) && !havesib) {
1915
                    disp += s->pc + s->rip_offset;
1916
                }
1917
            } else {
1918
                disp = 0;
1919
            }
1920
            break;
1921
        case 1:
1922
            disp = (int8_t)ldub_code(s->pc++);
1923
            break;
1924
        default:
1925
        case 2:
1926
            disp = ldl_code(s->pc);
1927
            s->pc += 4;
1928
            break;
1929
        }
1930

    
1931
        if (base >= 0) {
1932
            /* for correct popl handling with esp */
1933
            if (base == 4 && s->popl_esp_hack)
1934
                disp += s->popl_esp_hack;
1935
#ifdef TARGET_X86_64
1936
            if (s->aflag == 2) {
1937
                gen_op_movq_A0_reg(base);
1938
                if (disp != 0) {
1939
                    gen_op_addq_A0_im(disp);
1940
                }
1941
            } else
1942
#endif
1943
            {
1944
                gen_op_movl_A0_reg(base);
1945
                if (disp != 0)
1946
                    gen_op_addl_A0_im(disp);
1947
            }
1948
        } else {
1949
#ifdef TARGET_X86_64
1950
            if (s->aflag == 2) {
1951
                gen_op_movq_A0_im(disp);
1952
            } else
1953
#endif
1954
            {
1955
                gen_op_movl_A0_im(disp);
1956
            }
1957
        }
1958
        /* XXX: index == 4 is always invalid */
1959
        if (havesib && (index != 4 || scale != 0)) {
1960
#ifdef TARGET_X86_64
1961
            if (s->aflag == 2) {
1962
                gen_op_addq_A0_reg_sN(scale, index);
1963
            } else
1964
#endif
1965
            {
1966
                gen_op_addl_A0_reg_sN(scale, index);
1967
            }
1968
        }
1969
        if (must_add_seg) {
1970
            if (override < 0) {
1971
                if (base == R_EBP || base == R_ESP)
1972
                    override = R_SS;
1973
                else
1974
                    override = R_DS;
1975
            }
1976
#ifdef TARGET_X86_64
1977
            if (s->aflag == 2) {
1978
                gen_op_addq_A0_seg(override);
1979
            } else
1980
#endif
1981
            {
1982
                gen_op_addl_A0_seg(override);
1983
            }
1984
        }
1985
    } else {
1986
        switch (mod) {
1987
        case 0:
1988
            if (rm == 6) {
1989
                disp = lduw_code(s->pc);
1990
                s->pc += 2;
1991
                gen_op_movl_A0_im(disp);
1992
                rm = 0; /* avoid SS override */
1993
                goto no_rm;
1994
            } else {
1995
                disp = 0;
1996
            }
1997
            break;
1998
        case 1:
1999
            disp = (int8_t)ldub_code(s->pc++);
2000
            break;
2001
        default:
2002
        case 2:
2003
            disp = lduw_code(s->pc);
2004
            s->pc += 2;
2005
            break;
2006
        }
2007
        switch(rm) {
2008
        case 0:
2009
            gen_op_movl_A0_reg(R_EBX);
2010
            gen_op_addl_A0_reg_sN(0, R_ESI);
2011
            break;
2012
        case 1:
2013
            gen_op_movl_A0_reg(R_EBX);
2014
            gen_op_addl_A0_reg_sN(0, R_EDI);
2015
            break;
2016
        case 2:
2017
            gen_op_movl_A0_reg(R_EBP);
2018
            gen_op_addl_A0_reg_sN(0, R_ESI);
2019
            break;
2020
        case 3:
2021
            gen_op_movl_A0_reg(R_EBP);
2022
            gen_op_addl_A0_reg_sN(0, R_EDI);
2023
            break;
2024
        case 4:
2025
            gen_op_movl_A0_reg(R_ESI);
2026
            break;
2027
        case 5:
2028
            gen_op_movl_A0_reg(R_EDI);
2029
            break;
2030
        case 6:
2031
            gen_op_movl_A0_reg(R_EBP);
2032
            break;
2033
        default:
2034
        case 7:
2035
            gen_op_movl_A0_reg(R_EBX);
2036
            break;
2037
        }
2038
        if (disp != 0)
2039
            gen_op_addl_A0_im(disp);
2040
        gen_op_andl_A0_ffff();
2041
    no_rm:
2042
        if (must_add_seg) {
2043
            if (override < 0) {
2044
                if (rm == 2 || rm == 3 || rm == 6)
2045
                    override = R_SS;
2046
                else
2047
                    override = R_DS;
2048
            }
2049
            gen_op_addl_A0_seg(override);
2050
        }
2051
    }
2052

    
2053
    opreg = OR_A0;
2054
    disp = 0;
2055
    *reg_ptr = opreg;
2056
    *offset_ptr = disp;
2057
}
2058

    
2059
/* Skip the memory-operand bytes (SIB + displacement) of a modrm byte,
   advancing s->pc without generating any code.  Used for instructions
   whose operand is decoded but ignored (multi-byte NOPs, hints). */
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
    if (mod == 3)
        /* register operand: no further bytes to skip */
        return;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32-bit (or wider) addressing mode */
        base = rm;

        if (base == 4) {
            /* a SIB byte follows the modrm byte */
            code = ldub_code(s->pc++);
            base = (code & 7);
        }

        switch (mod) {
        case 0:
            if (base == 5) {
                /* disp32 with no base register */
                s->pc += 4;
            }
            break;
        case 1:
            /* disp8 */
            s->pc++;
            break;
        default:
        case 2:
            /* disp32 */
            s->pc += 4;
            break;
        }
    } else {
        /* 16-bit addressing mode */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* disp16 with no base register */
                s->pc += 2;
            }
            break;
        case 1:
            /* disp8 */
            s->pc++;
            break;
        default:
        case 2:
            /* disp16 */
            s->pc += 2;
            break;
        }
    }
}
2108

    
2109
/* used for LEA and MOV AX, mem */
2110
static void gen_add_A0_ds_seg(DisasContext *s)
2111
{
2112
    int override, must_add_seg;
2113
    must_add_seg = s->addseg;
2114
    override = R_DS;
2115
    if (s->override >= 0) {
2116
        override = s->override;
2117
        must_add_seg = 1;
2118
    } else {
2119
        override = R_DS;
2120
    }
2121
    if (must_add_seg) {
2122
#ifdef TARGET_X86_64
2123
        if (CODE64(s)) {
2124
            gen_op_addq_A0_seg(override);
2125
        } else
2126
#endif
2127
        {
2128
            gen_op_addl_A0_seg(override);
2129
        }
2130
    }
2131
}
2132

    
2133
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
   OR_TMP0 */
/* For mod == 3 the operand is a register and a register-to-register
   move is emitted; otherwise the effective address is computed into A0
   and a load/store through it is emitted.  When reg == OR_TMP0 the data
   value stays in / comes from T0 rather than a general register. */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    } else {
        /* memory operand: address into A0, then access via T0 */
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_st_T0_A0(ot + s->mem_index);
        } else {
            gen_op_ld_T0_A0(ot + s->mem_index);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    }
}
2164

    
2165
/* Fetch an immediate operand of size 'ot' from the instruction stream
   and advance s->pc past it.  Byte and word immediates are
   zero-extended; any other operand type is read as 32 bits. */
static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t val;

    if (ot == OT_BYTE) {
        val = ldub_code(s->pc);
        s->pc += 1;
    } else if (ot == OT_WORD) {
        val = lduw_code(s->pc);
        s->pc += 2;
    } else {
        /* OT_LONG and anything else */
        val = ldl_code(s->pc);
        s->pc += 4;
    }
    return val;
}
2186

    
2187
static inline int insn_const_size(unsigned int ot)
2188
{
2189
    if (ot <= OT_LONG)
2190
        return 1 << ot;
2191
    else
2192
        return 4;
2193
}
2194

    
2195
/* Emit a jump to 'eip'.  When the destination lies on one of the pages
   already covered by the current TB, the TBs can be chained directly;
   otherwise a full end-of-block is generated. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2215

    
2216
/* Emit a conditional jump: go to 'val' when condition 'b' holds,
   otherwise continue at 'next_eip'.  Uses direct TB chaining when
   s->jmp_opt allows it, a generic end-of-block otherwise. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    int l1, l2, cc_op;

    cc_op = s->cc_op;
    if (s->cc_op != CC_OP_DYNAMIC) {
        /* flush the statically-known cc_op before branching on flags */
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
    if (s->jmp_opt) {
        l1 = gen_new_label();
        gen_jcc1(s, cc_op, b, l1);

        /* not taken: chain to the fall-through TB */
        gen_goto_tb(s, 0, next_eip);

        /* taken: chain to the branch-target TB */
        gen_set_label(l1);
        gen_goto_tb(s, 1, val);
        s->is_jmp = 3;
    } else {

        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_jcc1(s, cc_op, b, l1);

        /* not taken */
        gen_jmp_im(next_eip);
        tcg_gen_br(l2);

        /* taken */
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
2250

    
2251
/* Leave 1 in T0 when condition 'b' holds, 0 otherwise (SETcc). */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op, l1;
    TCGv t0;

    if (is_fast_jcc_case(s, b)) {
        /* nominal case: we use a jump */
        /* XXX: make it faster by adding new instructions in TCG */
        t0 = tcg_temp_local_new(TCG_TYPE_TL);
        tcg_gen_movi_tl(t0, 0);
        l1 = gen_new_label();
        /* branch on the inverted condition so the fall-through sets 1 */
        gen_jcc1(s, s->cc_op, b ^ 1, l1);
        tcg_gen_movi_tl(t0, 1);
        gen_set_label(l1);
        tcg_gen_mov_tl(cpu_T[0], t0);
        tcg_temp_free(t0);
    } else {
        /* slow case: it is more efficient not to generate a jump,
           although it is questionable whether this optimization is
           worth it */
        inv = b & 1;
        jcc_op = (b >> 1) & 7;
        gen_setcc_slow_T0(s, jcc_op);
        if (inv) {
            /* odd condition codes are the negation of the even ones */
            tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
        }
    }
}
2279

    
2280
/* Load the 16-bit selector of segment 'seg_reg' into T0 (zero-extended). */
static inline void gen_op_movl_T0_seg(int seg_reg)
{
    tcg_gen_ld32u_tl(cpu_T[0], cpu_env, 
                     offsetof(CPUX86State,segs[seg_reg].selector));
}
2285

    
2286
/* Real-mode/VM86 segment load: store the 16-bit selector from T0 into
   'seg_reg' and set the segment base to selector << 4.  No
   protected-mode checks are performed.  Clobbers T0. */
static inline void gen_op_movl_seg_T0_vm(int seg_reg)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
    tcg_gen_st32_tl(cpu_T[0], cpu_env, 
                    offsetof(CPUX86State,segs[seg_reg].selector));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
    tcg_gen_st_tl(cpu_T[0], cpu_env, 
                  offsetof(CPUX86State,segs[seg_reg].base));
}
2295

    
2296
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
/* In protected mode the load_seg helper performs the full descriptor
   checks and may fault, so cc_op and EIP are synced first; in
   real/VM86 mode the simple selector<<4 base rule is applied inline. */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        gen_op_movl_seg_T0_vm(seg_reg);
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
2319

    
2320
static inline int svm_is_rep(int prefixes)
2321
{
2322
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2323
}
2324

    
2325
/* Emit an SVM intercept check of the given 'type' with exit-info
   parameter 'param'.  Compiles to nothing when SVM interception is not
   active; otherwise syncs cc_op/EIP and calls the check helper, which
   may trigger a #VMEXIT. */
static inline void
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint32_t type, uint64_t param)
{
    /* no SVM activated; fast case */
    if (likely(!(s->flags & HF_SVMI_MASK)))
        return;
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(pc_start - s->cs_base);
    tcg_gen_helper_0_2(helper_svm_check_intercept_param, 
                       tcg_const_i32(type), tcg_const_i64(param));
}
2338

    
2339
/* SVM intercept check with no extra exit-info parameter. */
static inline void
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    gen_svm_check_intercept_param(s, pc_start, type, 0);
}
2344

    
2345
/* Add 'addend' to the stack pointer, using the register width implied
   by the current mode: 64-bit in long mode, 32-bit for a 32-bit stack
   segment, 16-bit otherwise. */
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_add_reg_im(2, R_ESP, addend);
        return;
    }
#endif
    gen_op_add_reg_im(s->ss32 ? 1 : 0, R_ESP, addend);
}
2358

    
2359
/* generate a push. It depends on ss32, addseg and dflag */
/* Pushes the value in T0 and updates ESP/RSP accordingly. */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            /* 64-bit operand size: push 8 bytes */
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            /* 0x66-prefixed push in long mode: push 2 bytes */
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                /* keep the unsegmented ESP value in T1 for the final
                   stack-pointer writeback */
                tcg_gen_mov_tl(cpu_T[1], cpu_A0);
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            tcg_gen_mov_tl(cpu_T[1], cpu_A0);
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
2398

    
2399
/* generate a push. It depends on ss32, addseg and dflag */
2400
/* slower version for T1, only used for call Ev */
2401
static void gen_push_T1(DisasContext *s)
2402
{
2403
#ifdef TARGET_X86_64
2404
    if (CODE64(s)) {
2405
        gen_op_movq_A0_reg(R_ESP);
2406
        if (s->dflag) {
2407
            gen_op_addq_A0_im(-8);
2408
            gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2409
        } else {
2410
            gen_op_addq_A0_im(-2);
2411
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
2412
        }
2413
        gen_op_mov_reg_A0(2, R_ESP);
2414
    } else
2415
#endif
2416
    {
2417
        gen_op_movl_A0_reg(R_ESP);
2418
        if (!s->dflag)
2419
            gen_op_addl_A0_im(-2);
2420
        else
2421
            gen_op_addl_A0_im(-4);
2422
        if (s->ss32) {
2423
            if (s->addseg) {
2424
                gen_op_addl_A0_seg(R_SS);
2425
            }
2426
        } else {
2427
            gen_op_andl_A0_ffff();
2428
            gen_op_addl_A0_seg(R_SS);
2429
        }
2430
        gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2431

    
2432
        if (s->ss32 && !s->addseg)
2433
            gen_op_mov_reg_A0(1, R_ESP);
2434
        else
2435
            gen_stack_update(s, (-2) << s->dflag);
2436
    }
2437
}
2438

    
2439
/* two step pop is necessary for precise exceptions */
/* Loads the top of stack into T0 without moving the stack pointer;
   gen_pop_update() performs the pointer adjustment afterwards, so a
   faulting load leaves ESP/RSP untouched. */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
    }
}
2460

    
2461
/* Advance the stack pointer after a pop: 8 bytes for a 64-bit pop in
   long mode, otherwise 2 << dflag bytes. */
static void gen_pop_update(DisasContext *s)
{
    int incr = 2 << s->dflag;
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        incr = 8;
    }
#endif
    gen_stack_update(s, incr);
}
2472

    
2473
/* Compute the current stack address into A0; the unsegmented pointer
   value is also left in T1 for a later stack-pointer writeback. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
2482

    
2483
/* NOTE: wrap around in 16 bit not fully handled */
/* PUSHA/PUSHAD: push the eight general registers (EAX..EDI order,
   highest register first in memory). */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    /* 8 registers * (2 << dflag) bytes each */
    gen_op_addl_A0_im(-16 <<  s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    /* T1 keeps the final (unsegmented) stack pointer value */
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
2501

    
2502
/* NOTE: wrap around in 16 bit not fully handled */
/* POPA/POPAD: pop the eight general registers; the stored ESP image is
   discarded per the architecture. */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    /* T1 = final stack pointer after popping 8 slots */
    tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 <<  s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
2523

    
2524
/* Translate ENTER: allocate a stack frame of 'esp_addend' bytes and
   handle up to 31 levels of frame-pointer nesting (levels > 0 go
   through the enter_level helpers). */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    /* the nesting level is taken modulo 32 per the architecture */
    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        /* NOTE(review): loads the stack pointer with the 32-bit op even
           in 64-bit mode - confirm whether gen_op_movq_A0_reg(R_ESP)
           was intended here */
        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter64_level,
                               tcg_const_i32(level),
                               tcg_const_i32((ot == OT_QUAD)),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        /* T1 keeps the unsegmented new stack pointer */
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter_level,
                               tcg_const_i32(level),
                               tcg_const_i32(s->dflag),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
2579

    
2580
/* Raise exception 'trapno' at guest EIP 'cur_eip': sync cc_op and EIP,
   call the raise helper, and end the translation block. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
    s->is_jmp = 3;
}
2588

    
2589
/* an interrupt is different from an exception because of the
   privilege checks */
/* Raise software interrupt 'intno' (INT n): the helper receives the
   instruction length so it can push the return address. */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_2(helper_raise_interrupt, 
                       tcg_const_i32(intno), 
                       tcg_const_i32(next_eip - cur_eip));
    s->is_jmp = 3;
}
2602

    
2603
/* Stop execution at 'cur_eip' and enter the debugger (breakpoint). */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_0(helper_debug);
    s->is_jmp = 3;
}
2611

    
2612
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        /* interrupt shadow (MOV SS / STI) ends after this instruction */
        tcg_gen_helper_0_0(helper_reset_inhibit_irq);
    }
    if (s->singlestep_enabled) {
        /* gdbstub single-stepping */
        tcg_gen_helper_0_0(helper_debug);
    } else if (s->tf) {
        /* EFLAGS.TF: raise the trace exception */
        tcg_gen_helper_0_0(helper_single_step);
    } else {
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = 3;
}
2630

    
2631
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        /* chaining allowed: flush cc_op once and chain the TBs */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2647

    
2648
/* Unconditional jump to 'eip' using TB slot 0. */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2652

    
2653
static inline void gen_ldq_env_A0(int idx, int offset)
2654
{
2655
    int mem_index = (idx >> 2) - 1;
2656
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2657
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
2658
}
2659

    
2660
static inline void gen_stq_env_A0(int idx, int offset)
2661
{
2662
    int mem_index = (idx >> 2) - 1;
2663
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
2664
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
2665
}
2666

    
2667
/* Load a 128-bit XMM value from the guest address in A0 into the
   CPUState XMMReg at 'offset', as two 64-bit accesses. */
static inline void gen_ldo_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
}
2676

    
2677
/* Store a 128-bit XMM value from the CPUState XMMReg at 'offset' to
   the guest address in A0, as two 64-bit accesses. */
static inline void gen_sto_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
}
2686

    
2687
/* Copy a 128-bit (octa) value between two CPUState offsets. */
static inline void gen_op_movo(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
}
2694

    
2695
/* Copy a 64-bit value between two CPUState offsets. */
static inline void gen_op_movq(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
2700

    
2701
/* Copy a 32-bit value between two CPUState offsets. */
static inline void gen_op_movl(int d_offset, int s_offset)
{
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
}
2706

    
2707
/* Zero the 64-bit CPUState field at 'd_offset'. */
static inline void gen_op_movq_env_0(int d_offset)
{
    tcg_gen_movi_i64(cpu_tmp1_i64, 0);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
2712

    
2713
/* Sentinel values for the SSE dispatch tables below: entries that are
   decoded by hand in gen_sse() rather than via a direct helper call. */
#define SSE_SPECIAL ((void *)1)
#define SSE_DUMMY ((void *)2)

/* Table-entry builders: MMX_OP2 pairs the MMX and XMM forms of an op;
   SSE_FOP lists the ps/pd/ss/sd variants of a float op. */
#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
                     helper_ ## x ## ss, helper_ ## x ## sd, }
2719

    
2720
/* Dispatch table for 0F-prefixed MMX/SSE opcodes, indexed by the
   opcode byte and by prefix class (0: none/MMX, 1: 66, 2: F3, 3: F2).
   SSE_SPECIAL entries are hand-decoded in gen_sse(); SSE_DUMMY marks
   ops with no data operation of their own (femms/emms, 3DNow!). */
static void *sse_op_table1[256][4] = {
    /* 3DNow! extensions */
    [0x0e] = { SSE_DUMMY }, /* femms */
    [0x0f] = { SSE_DUMMY }, /* pf... */
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
    [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { helper_ucomiss, helper_ucomisd },
    [0x2f] = { helper_comiss, helper_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
    [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
    [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
    [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
    [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
               helper_cvtss2sd, helper_cvtsd2ss },
    [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    /* the actual compare predicate is selected via sse_op_table4 */
    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { helper_shufps, helper_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, helper_punpcklqdq_xmm },
    [0x6d] = { NULL, helper_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movqdu */
    [0x70] = { helper_pshufw_mmx,
               helper_pshufd_xmm,
               helper_pshufhw_xmm,
               helper_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
    [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2845

    
2846
/* Immediate-form MMX/SSE shifts (opcodes 0F 71/72/73), indexed by
   8 * shift-group (w/d/q) + modrm reg field; column is MMX vs XMM. */
static void *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, helper_psrldq_xmm }, /* XMM only */
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, helper_pslldq_xmm }, /* XMM only */
};
2858

    
2859
/* Scalar int<->float conversion helpers, in three groups of four:
   cvtsi2*, cvtt*2si (truncating), cvt*2si.  Within a group the order is
   ss, sd, then the 64-bit-integer variants (x86_64 builds only). */
static void *sse_op_table3[4 * 3] = {
    helper_cvtsi2ss,
    helper_cvtsi2sd,
    X86_64_ONLY(helper_cvtsq2ss),
    X86_64_ONLY(helper_cvtsq2sd),

    helper_cvttss2si,
    helper_cvttsd2si,
    X86_64_ONLY(helper_cvttss2sq),
    X86_64_ONLY(helper_cvttsd2sq),

    helper_cvtss2si,
    helper_cvtsd2si,
    X86_64_ONLY(helper_cvtss2sq),
    X86_64_ONLY(helper_cvtsd2sq),
};
2875

    
2876
/* CMPccPS/PD/SS/SD (opcode 0F C2): the compare predicate is selected
   by the imm8 operand; each row lists the ps/pd/ss/sd variants. */
static void *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2886

    
2887
/* 3DNow! operation helpers, indexed by the instruction's imm8 suffix
   byte (opcode 0F 0F /r ib).  NULL entries are invalid opcodes. */
static void *sse_op_table5[256] = {
    [0x0c] = helper_pi2fw,
    [0x0d] = helper_pi2fd,
    [0x1c] = helper_pf2iw,
    [0x1d] = helper_pf2id,
    [0x8a] = helper_pfnacc,
    [0x8e] = helper_pfpnacc,
    [0x90] = helper_pfcmpge,
    [0x94] = helper_pfmin,
    [0x96] = helper_pfrcp,
    [0x97] = helper_pfrsqrt,
    [0x9a] = helper_pfsub,
    [0x9e] = helper_pfadd,
    [0xa0] = helper_pfcmpgt,
    [0xa4] = helper_pfmax,
    [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = helper_movq, /* pfrsqit1 */
    [0xaa] = helper_pfsubr,
    [0xae] = helper_pfacc,
    [0xb0] = helper_pfcmpeq,
    [0xb4] = helper_pfmul,
    [0xb6] = helper_movq, /* pfrcpit2 */
    [0xb7] = helper_pmulhrw_mmx,
    [0xbb] = helper_pswapd,
    [0xbf] = helper_pavgb_mmx /* pavgusb */
};
2913

    
2914
/* Translate one MMX/SSE/SSE2/SSE3/3DNow! instruction.
   'b' is the opcode byte following 0x0f (masked to 8 bits here),
   'pc_start' is the instruction's start address (used to compute the
   faulting EIP for generated exceptions), and 'rex_r' is the REX.R
   extension bit, OR-ed into the register number for XMM operands.
   The prefix byte state in 's' selects one of four decode columns
   (none/0x66/0xf3/0xf2) in the sse_op_table* arrays. */
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    void *sse_op2;

    /* b1 is the prefix-selected decode column. */
    b &= 0xff;
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    illegal_op:
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        goto illegal_op;
    if (b == 0x0e) {
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
            goto illegal_op;
        /* femms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    if (b == 0x77) {
        /* emms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        tcg_gen_helper_0_0(helper_enter_mmx);
    }

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    if (is_xmm)
        reg |= rex_r;
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        /* Individually-decoded instructions; fold the prefix column into
           the opcode for the switch below. */
        b |= (b1 << 8);
        switch(b) {
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntps */
        case 0x3f0: /* lddqu */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx));
                tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
            }
            break;
        case 0x16e: /* movd xmm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
            }
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                /* From memory: load low dword, zero the upper three. */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                /* From memory: load low qword, zero the high qword. */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            /* Duplicate the even dwords into the odd positions. */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            /* Duplicate the odd dwords into the even positions. */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
        case 0x7e: /* movd ea, mm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x17e: /* movd ea, xmm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x27e: /* movq xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x7f: /* movq ea, mm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                            offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x011: /* movups */
        case 0x111: /* movupd */
        case 0x029: /* movaps */
        case 0x129: /* movapd */
        case 0x17f: /* movdqa ea, xmm */
        case 0x27f: /* movdqu ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                            offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x211: /* movss ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            }
            break;
        case 0x311: /* movsd ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            }
            break;
        case 0x013: /* movlps */
        case 0x113: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x017: /* movhps */
        case 0x117: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x71: /* shift mm, im */
        case 0x72:
        case 0x73:
        case 0x171: /* shift xmm, im */
        case 0x172:
        case 0x173:
            /* Stage the immediate shift count as the first helper operand
               (in xmm_t0/mmx_t0), then dispatch through sse_op_table2. */
            val = ldub_code(s->pc++);
            if (is_xmm) {
                gen_op_movl_T0_im(val);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
                op1_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                gen_op_movl_T0_im(val);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
                op1_offset = offsetof(CPUX86State,mmx_t0);
            }
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
            if (!sse_op2)
                goto illegal_op;
            if (is_xmm) {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0x050: /* movmskps */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x150: /* movmskpd */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x02a: /* cvtpi2ps */
        case 0x12a: /* cvtpi2pd */
            tcg_gen_helper_0_0(helper_enter_mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b >> 8) {
            case 0x0:
                tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
                break;
            default:
            case 0x1:
                tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22a: /* cvtsi2ss */
        case 0x32a: /* cvtsi2sd */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
            tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
            break;
        case 0x02c: /* cvttps2pi */
        case 0x12c: /* cvttpd2pi */
        case 0x02d: /* cvtps2pi */
        case 0x12d: /* cvtpd2pi */
            tcg_gen_helper_0_0(helper_enter_mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                gen_ldo_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b) {
            case 0x02c:
                tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x12c:
                tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x02d:
                tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x12d:
                tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22c: /* cvttss2si */
        case 0x32c: /* cvttsd2si */
        case 0x22d: /* cvtss2si */
        case 0x32d: /* cvtsd2si */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if ((b >> 8) & 1) {
                    gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
                } else {
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                    tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                }
                op2_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                                    (b & 1) * 4];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            if (ot == OT_LONG) {
                tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            } else {
                tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
            }
            gen_op_mov_reg_T0(ot, reg);
            break;
        case 0xc4: /* pinsrw */
        case 0x1c4:
            s->rip_offset = 1;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
            } else {
                val &= 3;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
            }
            break;
        case 0xc5: /* pextrw */
        case 0x1c5:
            if (mod != 3)
                goto illegal_op;
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
            } else {
                val &= 3;
                rm = (modrm & 7);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x1d6: /* movq ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x2d6: /* movq2dq */
            tcg_gen_helper_0_0(helper_enter_mmx);
            rm = (modrm & 7);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,fpregs[rm].mmx));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x3d6: /* movdq2q */
            tcg_gen_helper_0_0(helper_enter_mmx);
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            break;
        case 0xd7: /* pmovmskb */
        case 0x1d7:
            if (mod != 3)
                goto illegal_op;
            if (b1) {
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
                tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
            } else {
                rm = (modrm & 7);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
            }
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        default:
            goto illegal_op;
        }
    } else {
        /* generic MMX or SSE operation */
        switch(b) {
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
        case 0xc2: /* compare insns */
            s->rip_offset = 1;
            break;
        default:
            break;
        }
        if (is_xmm) {
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
                                b == 0xc2)) {
                    /* specific case for SSE single instructions */
                    if (b1 == 2) {
                        /* 32 bit access */
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                    } else {
                        /* 64 bit access */
                        gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
                    }
                } else {
                    gen_ldo_env_A0(s->mem_index, op2_offset);
                }
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
        } else {
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
        }
        switch(b) {
        case 0x0f: /* 3DNow! data insns */
            if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
                goto illegal_op;
            val = ldub_code(s->pc++);
            sse_op2 = sse_op_table5[val];
            if (!sse_op2)
                goto illegal_op;
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
            val = ldub_code(s->pc++);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
            break;
        case 0xc2:
            /* compare insns */
            val = ldub_code(s->pc++);
            if (val >= 8)
                goto illegal_op;
            sse_op2 = sse_op_table4[val][b1];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0xf7:
            /* maskmov : we must prepare A0 */
            if (mod != 3)
                goto illegal_op;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(R_EDI);
            } else
#endif
            {
                gen_op_movl_A0_reg(R_EDI);
                if (s->aflag == 0)
                    gen_op_andl_A0_ffff();
            }
            gen_add_A0_ds_seg(s);

            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
            break;
        default:
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        }
        if (b == 0x2e || b == 0x2f) {
            /* ucomis/comis update EFLAGS directly */
            s->cc_op = CC_OP_EFLAGS;
        }
    }
}
/* convert one instruction. s->is_jmp is set if the translation must
3581
   be stopped. Return the next pc value */
3582
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3583
{
3584
    int b, prefixes, aflag, dflag;
3585
    int shift, ot;
3586
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3587
    target_ulong next_eip, tval;
3588
    int rex_w, rex_r;
3589

    
3590
    if (unlikely(loglevel & CPU_LOG_TB_OP))
3591
        tcg_gen_debug_insn_start(pc_start);
3592
    s->pc = pc_start;
3593
    prefixes = 0;
3594
    aflag = s->code32;
3595
    dflag = s->code32;
3596
    s->override = -1;
3597
    rex_w = -1;
3598
    rex_r = 0;
3599
#ifdef TARGET_X86_64
3600
    s->rex_x = 0;
3601
    s->rex_b = 0;
3602
    x86_64_hregs = 0;
3603
#endif
3604
    s->rip_offset = 0; /* for relative ip address */
3605
 next_byte:
3606
    b = ldub_code(s->pc);
3607
    s->pc++;
3608
    /* check prefixes */
3609
#ifdef TARGET_X86_64
3610
    if (CODE64(s)) {
3611
        switch (b) {
3612
        case 0xf3:
3613
            prefixes |= PREFIX_REPZ;
3614
            goto next_byte;
3615
        case 0xf2:
3616
            prefixes |= PREFIX_REPNZ;
3617
            goto next_byte;
3618
        case 0xf0:
3619
            prefixes |= PREFIX_LOCK;
3620
            goto next_byte;
3621
        case 0x2e:
3622
            s->override = R_CS;
3623
            goto next_byte;
3624
        case 0x36:
3625
            s->override = R_SS;
3626
            goto next_byte;
3627
        case 0x3e:
3628
            s->override = R_DS;
3629
            goto next_byte;
3630
        case 0x26:
3631
            s->override = R_ES;
3632
            goto next_byte;
3633
        case 0x64:
3634
            s->override = R_FS;
3635
            goto next_byte;
3636
        case 0x65:
3637
            s->override = R_GS;
3638
            goto next_byte;
3639
        case 0x66:
3640
            prefixes |= PREFIX_DATA;
3641
            goto next_byte;
3642
        case 0x67:
3643
            prefixes |= PREFIX_ADR;
3644
            goto next_byte;
3645
        case 0x40 ... 0x4f:
3646
            /* REX prefix */
3647
            rex_w = (b >> 3) & 1;
3648
            rex_r = (b & 0x4) << 1;
3649
            s->rex_x = (b & 0x2) << 2;
3650
            REX_B(s) = (b & 0x1) << 3;
3651
            x86_64_hregs = 1; /* select uniform byte register addressing */
3652
            goto next_byte;
3653
        }
3654
        if (rex_w == 1) {
3655
            /* 0x66 is ignored if rex.w is set */
3656
            dflag = 2;
3657
        } else {
3658
            if (prefixes & PREFIX_DATA)
3659
                dflag ^= 1;
3660
        }
3661
        if (!(prefixes & PREFIX_ADR))
3662
            aflag = 2;
3663
    } else
3664
#endif
3665
    {
3666
        switch (b) {
3667
        case 0xf3:
3668
            prefixes |= PREFIX_REPZ;
3669
            goto next_byte;
3670
        case 0xf2:
3671
            prefixes |= PREFIX_REPNZ;
3672
            goto next_byte;
3673
        case 0xf0:
3674
            prefixes |= PREFIX_LOCK;
3675
            goto next_byte;
3676
        case 0x2e:
3677
            s->override = R_CS;
3678
            goto next_byte;
3679
        case 0x36:
3680
            s->override = R_SS;
3681
            goto next_byte;
3682
        case 0x3e:
3683
            s->override = R_DS;
3684
            goto next_byte;
3685
        case 0x26:
3686
            s->override = R_ES;
3687
            goto next_byte;
3688
        case 0x64:
3689
            s->override = R_FS;
3690
            goto next_byte;
3691
        case 0x65:
3692
            s->override = R_GS;
3693
            goto next_byte;
3694
        case 0x66:
3695
            prefixes |= PREFIX_DATA;
3696
            goto next_byte;
3697
        case 0x67:
3698
            prefixes |= PREFIX_ADR;
3699
            goto next_byte;
3700
        }
3701
        if (prefixes & PREFIX_DATA)
3702
            dflag ^= 1;
3703
        if (prefixes & PREFIX_ADR)
3704
            aflag ^= 1;
3705
    }
3706

    
3707
    s->prefix = prefixes;
3708
    s->aflag = aflag;
3709
    s->dflag = dflag;
3710

    
3711
    /* lock generation */
3712
    if (prefixes & PREFIX_LOCK)
3713
        tcg_gen_helper_0_0(helper_lock);
3714

    
3715
    /* now check op code */
3716
 reswitch:
3717
    switch(b) {
3718
    case 0x0f:
3719
        /**************************/
3720
        /* extended op code */
3721
        b = ldub_code(s->pc++) | 0x100;
3722
        goto reswitch;
3723

    
3724
        /**************************/
3725
        /* arith & logic */
3726
    case 0x00 ... 0x05:
3727
    case 0x08 ... 0x0d:
3728
    case 0x10 ... 0x15:
3729
    case 0x18 ... 0x1d:
3730
    case 0x20 ... 0x25:
3731
    case 0x28 ... 0x2d:
3732
    case 0x30 ... 0x35:
3733
    case 0x38 ... 0x3d:
3734
        {
3735
            int op, f, val;
3736
            op = (b >> 3) & 7;
3737
            f = (b >> 1) & 3;
3738

    
3739
            if ((b & 1) == 0)
3740
                ot = OT_BYTE;
3741
            else
3742
                ot = dflag + OT_WORD;
3743

    
3744
            switch(f) {
3745
            case 0: /* OP Ev, Gv */
3746
                modrm = ldub_code(s->pc++);
3747
                reg = ((modrm >> 3) & 7) | rex_r;
3748
                mod = (modrm >> 6) & 3;
3749
                rm = (modrm & 7) | REX_B(s);
3750
                if (mod != 3) {
3751
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3752
                    opreg = OR_TMP0;
3753
                } else if (op == OP_XORL && rm == reg) {
3754
                xor_zero:
3755
                    /* xor reg, reg optimisation */
3756
                    gen_op_movl_T0_0();
3757
                    s->cc_op = CC_OP_LOGICB + ot;
3758
                    gen_op_mov_reg_T0(ot, reg);
3759
                    gen_op_update1_cc();
3760
                    break;
3761
                } else {
3762
                    opreg = rm;
3763
                }
3764
                gen_op_mov_TN_reg(ot, 1, reg);
3765
                gen_op(s, op, ot, opreg);
3766
                break;
3767
            case 1: /* OP Gv, Ev */
3768
                modrm = ldub_code(s->pc++);
3769
                mod = (modrm >> 6) & 3;
3770
                reg = ((modrm >> 3) & 7) | rex_r;
3771
                rm = (modrm & 7) | REX_B(s);
3772
                if (mod != 3) {
3773
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3774
                    gen_op_ld_T1_A0(ot + s->mem_index);
3775
                } else if (op == OP_XORL && rm == reg) {
3776
                    goto xor_zero;
3777
                } else {
3778
                    gen_op_mov_TN_reg(ot, 1, rm);
3779
                }
3780
                gen_op(s, op, ot, reg);
3781
                break;
3782
            case 2: /* OP A, Iv */
3783
                val = insn_get(s, ot);
3784
                gen_op_movl_T1_im(val);
3785
                gen_op(s, op, ot, OR_EAX);
3786
                break;
3787
            }
3788
        }
3789
        break;
3790

    
3791
    case 0x80: /* GRP1 */
3792
    case 0x81:
3793
    case 0x82:
3794
    case 0x83:
3795
        {
3796
            int val;
3797

    
3798
            if ((b & 1) == 0)
3799
                ot = OT_BYTE;
3800
            else
3801
                ot = dflag + OT_WORD;
3802

    
3803
            modrm = ldub_code(s->pc++);
3804
            mod = (modrm >> 6) & 3;
3805
            rm = (modrm & 7) | REX_B(s);
3806
            op = (modrm >> 3) & 7;
3807

    
3808
            if (mod != 3) {
3809
                if (b == 0x83)
3810
                    s->rip_offset = 1;
3811
                else
3812
                    s->rip_offset = insn_const_size(ot);
3813
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3814
                opreg = OR_TMP0;
3815
            } else {
3816
                opreg = rm;
3817
            }
3818

    
3819
            switch(b) {
3820
            default:
3821
            case 0x80:
3822
            case 0x81:
3823
            case 0x82:
3824
                val = insn_get(s, ot);
3825
                break;
3826
            case 0x83:
3827
                val = (int8_t)insn_get(s, OT_BYTE);
3828
                break;
3829
            }
3830
            gen_op_movl_T1_im(val);
3831
            gen_op(s, op, ot, opreg);
3832
        }
3833
        break;
3834

    
3835
        /**************************/
3836
        /* inc, dec, and other misc arith */
3837
    case 0x40 ... 0x47: /* inc Gv */
3838
        ot = dflag ? OT_LONG : OT_WORD;
3839
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
3840
        break;
3841
    case 0x48 ... 0x4f: /* dec Gv */
3842
        ot = dflag ? OT_LONG : OT_WORD;
3843
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
3844
        break;
3845
    case 0xf6: /* GRP3 */
3846
    case 0xf7:
3847
        if ((b & 1) == 0)
3848
            ot = OT_BYTE;
3849
        else
3850
            ot = dflag + OT_WORD;
3851

    
3852
        modrm = ldub_code(s->pc++);
3853
        mod = (modrm >> 6) & 3;
3854
        rm = (modrm & 7) | REX_B(s);
3855
        op = (modrm >> 3) & 7;
3856
        if (mod != 3) {
3857
            if (op == 0)
3858
                s->rip_offset = insn_const_size(ot);
3859
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3860
            gen_op_ld_T0_A0(ot + s->mem_index);
3861
        } else {
3862
            gen_op_mov_TN_reg(ot, 0, rm);
3863
        }
3864

    
3865
        switch(op) {
3866
        case 0: /* test */
3867
            val = insn_get(s, ot);
3868
            gen_op_movl_T1_im(val);
3869
            gen_op_testl_T0_T1_cc();
3870
            s->cc_op = CC_OP_LOGICB + ot;
3871
            break;
3872
        case 2: /* not */
3873
            tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
3874
            if (mod != 3) {
3875
                gen_op_st_T0_A0(ot + s->mem_index);
3876
            } else {
3877
                gen_op_mov_reg_T0(ot, rm);
3878
            }
3879
            break;
3880
        case 3: /* neg */
3881
            tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
3882
            if (mod != 3) {
3883
                gen_op_st_T0_A0(ot + s->mem_index);
3884
            } else {
3885
                gen_op_mov_reg_T0(ot, rm);
3886
            }
3887
            gen_op_update_neg_cc();
3888
            s->cc_op = CC_OP_SUBB + ot;
3889
            break;
3890
        case 4: /* mul */
3891
            switch(ot) {
3892
            case OT_BYTE:
3893
                gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
3894
                tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
3895
                tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
3896
                /* XXX: use 32 bit mul which could be faster */
3897
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3898
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
3899
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3900
                tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
3901
                s->cc_op = CC_OP_MULB;
3902
                break;
3903
            case OT_WORD:
3904
                gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
3905
                tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
3906
                tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
3907
                /* XXX: use 32 bit mul which could be faster */
3908
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3909
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
3910
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3911
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
3912
                gen_op_mov_reg_T0(OT_WORD, R_EDX);
3913
                tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3914
                s->cc_op = CC_OP_MULW;
3915
                break;
3916
            default:
3917
            case OT_LONG:
3918
#ifdef TARGET_X86_64
3919
                gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3920
                tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
3921
                tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
3922
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3923
                gen_op_mov_reg_T0(OT_LONG, R_EAX);
3924
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3925
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
3926
                gen_op_mov_reg_T0(OT_LONG, R_EDX);
3927
                tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3928
#else
3929
                {
3930
                    TCGv t0, t1;
3931
                    t0 = tcg_temp_new(TCG_TYPE_I64);
3932
                    t1 = tcg_temp_new(TCG_TYPE_I64);
3933
                    gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3934
                    tcg_gen_extu_i32_i64(t0, cpu_T[0]);
3935
                    tcg_gen_extu_i32_i64(t1, cpu_T[1]);
3936
                    tcg_gen_mul_i64(t0, t0, t1);
3937
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3938
                    gen_op_mov_reg_T0(OT_LONG, R_EAX);
3939
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3940
                    tcg_gen_shri_i64(t0, t0, 32);
3941
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3942
                    gen_op_mov_reg_T0(OT_LONG, R_EDX);
3943
                    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3944
                }
3945
#endif
3946
                s->cc_op = CC_OP_MULL;
3947
                break;
3948
#ifdef TARGET_X86_64
3949
            case OT_QUAD:
3950
                tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
3951
                s->cc_op = CC_OP_MULQ;
3952
                break;
3953
#endif
3954
            }
3955
            break;
3956
        case 5: /* imul */
3957
            switch(ot) {
3958
            case OT_BYTE:
3959
                gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
3960
                tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
3961
                tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
3962
                /* XXX: use 32 bit mul which could be faster */
3963
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3964
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
3965
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3966
                tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
3967
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3968
                s->cc_op = CC_OP_MULB;
3969
                break;
3970
            case OT_WORD:
3971
                gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
3972
                tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
3973
                tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
3974
                /* XXX: use 32 bit mul which could be faster */
3975
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3976
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
3977
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3978
                tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
3979
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3980
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
3981
                gen_op_mov_reg_T0(OT_WORD, R_EDX);
3982
                s->cc_op = CC_OP_MULW;
3983
                break;
3984
            default:
3985
            case OT_LONG:
3986
#ifdef TARGET_X86_64
3987
                gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3988
                tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
3989
                tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
3990
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3991
                gen_op_mov_reg_T0(OT_LONG, R_EAX);
3992
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3993
                tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
3994
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3995
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
3996
                gen_op_mov_reg_T0(OT_LONG, R_EDX);
3997
#else
3998
                {
3999
                    TCGv t0, t1;
4000
                    t0 = tcg_temp_new(TCG_TYPE_I64);
4001
                    t1 = tcg_temp_new(TCG_TYPE_I64);
4002
                    gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4003
                    tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4004
                    tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4005
                    tcg_gen_mul_i64(t0, t0, t1);
4006
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4007
                    gen_op_mov_reg_T0(OT_LONG, R_EAX);
4008
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4009
                    tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4010
                    tcg_gen_shri_i64(t0, t0, 32);
4011
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4012
                    gen_op_mov_reg_T0(OT_LONG, R_EDX);
4013
                    tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4014
                }
4015
#endif
4016
                s->cc_op = CC_OP_MULL;
4017
                break;
4018
#ifdef TARGET_X86_64
4019
            case OT_QUAD:
4020
                tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
4021
                s->cc_op = CC_OP_MULQ;
4022
                break;
4023
#endif
4024
            }
4025
            break;
4026
        case 6: /* div */
4027
            switch(ot) {
4028
            case OT_BYTE:
4029
                gen_jmp_im(pc_start - s->cs_base);
4030
                tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
4031
                break;
4032
            case OT_WORD:
4033
                gen_jmp_im(pc_start - s->cs_base);
4034
                tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
4035
                break;
4036
            default:
4037
            case OT_LONG:
4038
                gen_jmp_im(pc_start - s->cs_base);
4039
                tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
4040
                break;
4041
#ifdef TARGET_X86_64
4042
            case OT_QUAD:
4043
                gen_jmp_im(pc_start - s->cs_base);
4044
                tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
4045
                break;
4046
#endif
4047
            }
4048
            break;
4049
        case 7: /* idiv */
4050
            switch(ot) {
4051
            case OT_BYTE:
4052
                gen_jmp_im(pc_start - s->cs_base);
4053
                tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
4054
                break;
4055
            case OT_WORD:
4056
                gen_jmp_im(pc_start - s->cs_base);
4057
                tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
4058
                break;
4059
            default:
4060
            case OT_LONG:
4061
                gen_jmp_im(pc_start - s->cs_base);
4062
                tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
4063
                break;
4064
#ifdef TARGET_X86_64
4065
            case OT_QUAD:
4066
                gen_jmp_im(pc_start - s->cs_base);
4067
                tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
4068
                break;
4069
#endif
4070
            }
4071
            break;
4072
        default:
4073
            goto illegal_op;
4074
        }
4075
        break;
4076

    
4077
    case 0xfe: /* GRP4 */
4078
    case 0xff: /* GRP5 */
4079
        if ((b & 1) == 0)
4080
            ot = OT_BYTE;
4081
        else
4082
            ot = dflag + OT_WORD;
4083

    
4084
        modrm = ldub_code(s->pc++);
4085
        mod = (modrm >> 6) & 3;
4086
        rm = (modrm & 7) | REX_B(s);
4087
        op = (modrm >> 3) & 7;
4088
        if (op >= 2 && b == 0xfe) {
4089
            goto illegal_op;
4090
        }
4091
        if (CODE64(s)) {
4092
            if (op == 2 || op == 4) {
4093
                /* operand size for jumps is 64 bit */
4094
                ot = OT_QUAD;
4095
            } else if (op == 3 || op == 5) {
4096
                /* for call calls, the operand is 16 or 32 bit, even
4097
                   in long mode */
4098
                ot = dflag ? OT_LONG : OT_WORD;
4099
            } else if (op == 6) {
4100
                /* default push size is 64 bit */
4101
                ot = dflag ? OT_QUAD : OT_WORD;
4102
            }
4103
        }
4104
        if (mod != 3) {
4105
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4106
            if (op >= 2 && op != 3 && op != 5)
4107
                gen_op_ld_T0_A0(ot + s->mem_index);
4108
        } else {
4109
            gen_op_mov_TN_reg(ot, 0, rm);
4110
        }
4111

    
4112
        switch(op) {
4113
        case 0: /* inc Ev */
4114
            if (mod != 3)
4115
                opreg = OR_TMP0;
4116
            else
4117
                opreg = rm;
4118
            gen_inc(s, ot, opreg, 1);
4119
            break;
4120
        case 1: /* dec Ev */
4121
            if (mod != 3)
4122
                opreg = OR_TMP0;
4123
            else
4124
                opreg = rm;
4125
            gen_inc(s, ot, opreg, -1);
4126
            break;
4127
        case 2: /* call Ev */
4128
            /* XXX: optimize if memory (no 'and' is necessary) */
4129
            if (s->dflag == 0)
4130
                gen_op_andl_T0_ffff();
4131
            next_eip = s->pc - s->cs_base;
4132
            gen_movtl_T1_im(next_eip);
4133
            gen_push_T1(s);
4134
            gen_op_jmp_T0();
4135
            gen_eob(s);
4136
            break;
4137
        case 3: /* lcall Ev */
4138
            gen_op_ld_T1_A0(ot + s->mem_index);
4139
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4140
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4141
        do_lcall:
4142
            if (s->pe && !s->vm86) {
4143
                if (s->cc_op != CC_OP_DYNAMIC)
4144
                    gen_op_set_cc_op(s->cc_op);
4145
                gen_jmp_im(pc_start - s->cs_base);
4146
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4147
                tcg_gen_helper_0_4(helper_lcall_protected,
4148
                                   cpu_tmp2_i32, cpu_T[1],
4149
                                   tcg_const_i32(dflag), 
4150
                                   tcg_const_i32(s->pc - pc_start));
4151
            } else {
4152
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4153
                tcg_gen_helper_0_4(helper_lcall_real,
4154
                                   cpu_tmp2_i32, cpu_T[1],
4155
                                   tcg_const_i32(dflag), 
4156
                                   tcg_const_i32(s->pc - s->cs_base));
4157
            }
4158
            gen_eob(s);
4159
            break;
4160
        case 4: /* jmp Ev */
4161
            if (s->dflag == 0)
4162
                gen_op_andl_T0_ffff();
4163
            gen_op_jmp_T0();
4164
            gen_eob(s);
4165
            break;
4166
        case 5: /* ljmp Ev */
4167
            gen_op_ld_T1_A0(ot + s->mem_index);
4168
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4169
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4170
        do_ljmp:
4171
            if (s->pe && !s->vm86) {
4172
                if (s->cc_op != CC_OP_DYNAMIC)
4173
                    gen_op_set_cc_op(s->cc_op);
4174
                gen_jmp_im(pc_start - s->cs_base);
4175
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4176
                tcg_gen_helper_0_3(helper_ljmp_protected,
4177
                                   cpu_tmp2_i32,
4178
                                   cpu_T[1],
4179
                                   tcg_const_i32(s->pc - pc_start));
4180
            } else {
4181
                gen_op_movl_seg_T0_vm(R_CS);
4182
                gen_op_movl_T0_T1();
4183
                gen_op_jmp_T0();
4184
            }
4185
            gen_eob(s);
4186
            break;
4187
        case 6: /* push Ev */
4188
            gen_push_T0(s);
4189
            break;
4190
        default:
4191
            goto illegal_op;
4192
        }
4193
        break;
4194

    
4195
    case 0x84: /* test Ev, Gv */
4196
    case 0x85:
4197
        if ((b & 1) == 0)
4198
            ot = OT_BYTE;
4199
        else
4200
            ot = dflag + OT_WORD;
4201

    
4202
        modrm = ldub_code(s->pc++);
4203
        mod = (modrm >> 6) & 3;
4204
        rm = (modrm & 7) | REX_B(s);
4205
        reg = ((modrm >> 3) & 7) | rex_r;
4206

    
4207
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4208
        gen_op_mov_TN_reg(ot, 1, reg);
4209
        gen_op_testl_T0_T1_cc();
4210
        s->cc_op = CC_OP_LOGICB + ot;
4211
        break;
4212

    
4213
    case 0xa8: /* test eAX, Iv */
4214
    case 0xa9:
4215
        if ((b & 1) == 0)
4216
            ot = OT_BYTE;
4217
        else
4218
            ot = dflag + OT_WORD;
4219
        val = insn_get(s, ot);
4220

    
4221
        gen_op_mov_TN_reg(ot, 0, OR_EAX);
4222
        gen_op_movl_T1_im(val);
4223
        gen_op_testl_T0_T1_cc();
4224
        s->cc_op = CC_OP_LOGICB + ot;
4225
        break;
4226

    
4227
    case 0x98: /* CWDE/CBW */
4228
#ifdef TARGET_X86_64
4229
        if (dflag == 2) {
4230
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4231
            tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4232
            gen_op_mov_reg_T0(OT_QUAD, R_EAX);
4233
        } else
4234
#endif
4235
        if (dflag == 1) {
4236
            gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4237
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4238
            gen_op_mov_reg_T0(OT_LONG, R_EAX);
4239
        } else {
4240
            gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
4241
            tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4242
            gen_op_mov_reg_T0(OT_WORD, R_EAX);
4243
        }
4244
        break;
4245
    case 0x99: /* CDQ/CWD */
4246
#ifdef TARGET_X86_64
4247
        if (dflag == 2) {
4248
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4249
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
4250
            gen_op_mov_reg_T0(OT_QUAD, R_EDX);
4251
        } else
4252
#endif
4253
        if (dflag == 1) {
4254
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4255
            tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4256
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
4257
            gen_op_mov_reg_T0(OT_LONG, R_EDX);
4258
        } else {
4259
            gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4260
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4261
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
4262
            gen_op_mov_reg_T0(OT_WORD, R_EDX);
4263
        }
4264
        break;
4265
    case 0x1af: /* imul Gv, Ev */
4266
    case 0x69: /* imul Gv, Ev, I */
4267
    case 0x6b:
4268
        ot = dflag + OT_WORD;
4269
        modrm = ldub_code(s->pc++);
4270
        reg = ((modrm >> 3) & 7) | rex_r;
4271
        if (b == 0x69)
4272
            s->rip_offset = insn_const_size(ot);
4273
        else if (b == 0x6b)
4274
            s->rip_offset = 1;
4275
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4276
        if (b == 0x69) {
4277
            val = insn_get(s, ot);
4278
            gen_op_movl_T1_im(val);
4279
        } else if (b == 0x6b) {
4280
            val = (int8_t)insn_get(s, OT_BYTE);
4281
            gen_op_movl_T1_im(val);
4282
        } else {
4283
            gen_op_mov_TN_reg(ot, 1, reg);
4284
        }
4285

    
4286
#ifdef TARGET_X86_64
4287
        if (ot == OT_QUAD) {
4288
            tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
4289
        } else
4290
#endif
4291
        if (ot == OT_LONG) {
4292
#ifdef TARGET_X86_64
4293
                tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4294
                tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4295
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4296
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4297
                tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4298
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4299
#else
4300
                {
4301
                    TCGv t0, t1;
4302
                    t0 = tcg_temp_new(TCG_TYPE_I64);
4303
                    t1 = tcg_temp_new(TCG_TYPE_I64);
4304
                    tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4305
                    tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4306
                    tcg_gen_mul_i64(t0, t0, t1);
4307
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4308
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4309
                    tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4310
                    tcg_gen_shri_i64(t0, t0, 32);
4311
                    tcg_gen_trunc_i64_i32(cpu_T[1], t0);
4312
                    tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
4313
                }
4314
#endif
4315
        } else {
4316
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4317
            tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4318
            /* XXX: use 32 bit mul which could be faster */
4319
            tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4320
            tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4321
            tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4322
            tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4323
        }
4324
        gen_op_mov_reg_T0(ot, reg);
4325
        s->cc_op = CC_OP_MULB + ot;
4326
        break;
4327
    case 0x1c0:
4328
    case 0x1c1: /* xadd Ev, Gv */
4329
        if ((b & 1) == 0)
4330
            ot = OT_BYTE;
4331
        else
4332
            ot = dflag + OT_WORD;
4333
        modrm = ldub_code(s->pc++);
4334
        reg = ((modrm >> 3) & 7) | rex_r;
4335
        mod = (modrm >> 6) & 3;
4336
        if (mod == 3) {
4337
            rm = (modrm & 7) | REX_B(s);
4338
            gen_op_mov_TN_reg(ot, 0, reg);
4339
            gen_op_mov_TN_reg(ot, 1, rm);
4340
            gen_op_addl_T0_T1();
4341
            gen_op_mov_reg_T1(ot, reg);
4342
            gen_op_mov_reg_T0(ot, rm);
4343
        } else {
4344
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4345
            gen_op_mov_TN_reg(ot, 0, reg);
4346
            gen_op_ld_T1_A0(ot + s->mem_index);
4347
            gen_op_addl_T0_T1();
4348
            gen_op_st_T0_A0(ot + s->mem_index);
4349
            gen_op_mov_reg_T1(ot, reg);
4350
        }
4351
        gen_op_update2_cc();
4352
        s->cc_op = CC_OP_ADDB + ot;
4353
        break;
4354
    case 0x1b0:
4355
    case 0x1b1: /* cmpxchg Ev, Gv */
4356
        {
4357
            int label1, label2;
4358
            TCGv t0, t1, t2, a0;
4359

    
4360
            if ((b & 1) == 0)
4361
                ot = OT_BYTE;
4362
            else
4363
                ot = dflag + OT_WORD;
4364
            modrm = ldub_code(s->pc++);
4365
            reg = ((modrm >> 3) & 7) | rex_r;
4366
            mod = (modrm >> 6) & 3;
4367
            t0 = tcg_temp_local_new(TCG_TYPE_TL);
4368
            t1 = tcg_temp_local_new(TCG_TYPE_TL);
4369
            t2 = tcg_temp_local_new(TCG_TYPE_TL);
4370
            a0 = tcg_temp_local_new(TCG_TYPE_TL);
4371
            gen_op_mov_v_reg(ot, t1, reg);
4372
            if (mod == 3) {
4373
                rm = (modrm & 7) | REX_B(s);
4374
                gen_op_mov_v_reg(ot, t0, rm);
4375
            } else {
4376
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4377
                tcg_gen_mov_tl(a0, cpu_A0);
4378
                gen_op_ld_v(ot + s->mem_index, t0, a0);
4379
                rm = 0; /* avoid warning */
4380
            }
4381
            label1 = gen_new_label();
4382
            tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
4383
            tcg_gen_sub_tl(t2, t2, t0);
4384
            gen_extu(ot, t2);
4385
            tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
4386
            if (mod == 3) {
4387
                label2 = gen_new_label();
4388
                gen_op_mov_reg_v(ot, R_EAX, t0);
4389
                tcg_gen_br(label2);
4390
                gen_set_label(label1);
4391
                gen_op_mov_reg_v(ot, rm, t1);
4392
                gen_set_label(label2);
4393
            } else {
4394
                tcg_gen_mov_tl(t1, t0);
4395
                gen_op_mov_reg_v(ot, R_EAX, t0);
4396
                gen_set_label(label1);
4397
                /* always store */
4398
                gen_op_st_v(ot + s->mem_index, t1, a0);
4399
            }
4400
            tcg_gen_mov_tl(cpu_cc_src, t0);
4401
            tcg_gen_mov_tl(cpu_cc_dst, t2);
4402
            s->cc_op = CC_OP_SUBB + ot;
4403
            tcg_temp_free(t0);
4404
            tcg_temp_free(t1);
4405
            tcg_temp_free(t2);
4406
            tcg_temp_free(a0);
4407
        }
4408
        break;
4409
    case 0x1c7: /* cmpxchg8b */
4410
        modrm = ldub_code(s->pc++);
4411
        mod = (modrm >> 6) & 3;
4412
        if ((mod == 3) || ((modrm & 0x38) != 0x8))
4413
            goto illegal_op;
4414
#ifdef TARGET_X86_64
4415
        if (dflag == 2) {
4416
            if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
4417
                goto illegal_op;
4418
            gen_jmp_im(pc_start - s->cs_base);
4419
            if (s->cc_op != CC_OP_DYNAMIC)
4420
                gen_op_set_cc_op(s->cc_op);
4421
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4422
            tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
4423
        } else
4424
#endif        
4425
        {
4426
            if (!(s->cpuid_features & CPUID_CX8))
4427
                goto illegal_op;
4428
            gen_jmp_im(pc_start - s->cs_base);
4429
            if (s->cc_op != CC_OP_DYNAMIC)
4430
                gen_op_set_cc_op(s->cc_op);
4431
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4432
            tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
4433
        }
4434
        s->cc_op = CC_OP_EFLAGS;
4435
        break;
4436

    
4437
        /**************************/
4438
        /* push/pop */
4439
    case 0x50 ... 0x57: /* push */
4440
        gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
4441
        gen_push_T0(s);
4442
        break;
4443
    case 0x58 ... 0x5f: /* pop */
4444
        if (CODE64(s)) {
4445
            ot = dflag ? OT_QUAD : OT_WORD;
4446
        } else {
4447
            ot = dflag + OT_WORD;
4448
        }
4449
        gen_pop_T0(s);
4450
        /* NOTE: order is important for pop %sp */
4451
        gen_pop_update(s);
4452
        gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
4453
        break;
4454
    case 0x60: /* pusha */
4455
        if (CODE64(s))
4456
            goto illegal_op;
4457
        gen_pusha(s);
4458
        break;
4459
    case 0x61: /* popa */
4460
        if (CODE64(s))
4461
            goto illegal_op;
4462
        gen_popa(s);
4463
        break;
4464
    case 0x68: /* push Iv */
4465
    case 0x6a:
4466
        if (CODE64(s)) {
4467
            ot = dflag ? OT_QUAD : OT_WORD;
4468
        } else {
4469
            ot = dflag + OT_WORD;
4470
        }
4471
        if (b == 0x68)
4472
            val = insn_get(s, ot);
4473
        else
4474
            val = (int8_t)insn_get(s, OT_BYTE);
4475
        gen_op_movl_T0_im(val);
4476
        gen_push_T0(s);
4477
        break;
4478
    case 0x8f: /* pop Ev */
4479
        if (CODE64(s)) {
4480
            ot = dflag ? OT_QUAD : OT_WORD;
4481
        } else {
4482
            ot = dflag + OT_WORD;
4483
        }
4484
        modrm = ldub_code(s->pc++);
4485
        mod = (modrm >> 6) & 3;
4486
        gen_pop_T0(s);
4487
        if (mod == 3) {
4488
            /* NOTE: order is important for pop %sp */
4489
            gen_pop_update(s);
4490
            rm = (modrm & 7) | REX_B(s);
4491
            gen_op_mov_reg_T0(ot, rm);
4492
        } else {
4493
            /* NOTE: order is important too for MMU exceptions */
4494
            s->popl_esp_hack = 1 << ot;
4495
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4496
            s->popl_esp_hack = 0;
4497
            gen_pop_update(s);
4498
        }
4499
        break;
4500
    case 0xc8: /* enter */
4501
        {
4502
            int level;
4503
            val = lduw_code(s->pc);
4504
            s->pc += 2;
4505
            level = ldub_code(s->pc++);
4506
            gen_enter(s, val, level);
4507
        }
4508
        break;
4509
    case 0xc9: /* leave */
4510
        /* XXX: exception not precise (ESP is updated before potential exception) */
4511
        if (CODE64(s)) {
4512
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4513
            gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4514
        } else if (s->ss32) {
4515
            gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4516
            gen_op_mov_reg_T0(OT_LONG, R_ESP);
4517
        } else {
4518
            gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4519
            gen_op_mov_reg_T0(OT_WORD, R_ESP);
4520
        }
4521
        gen_pop_T0(s);
4522
        if (CODE64(s)) {
4523
            ot = dflag ? OT_QUAD : OT_WORD;
4524
        } else {
4525
            ot = dflag + OT_WORD;
4526
        }
4527
        gen_op_mov_reg_T0(ot, R_EBP);
4528
        gen_pop_update(s);
4529
        break;
4530
    case 0x06: /* push es */
4531
    case 0x0e: /* push cs */
4532
    case 0x16: /* push ss */
4533
    case 0x1e: /* push ds */
4534
        if (CODE64(s))
4535
            goto illegal_op;
4536
        gen_op_movl_T0_seg(b >> 3);
4537
        gen_push_T0(s);
4538
        break;
4539
    case 0x1a0: /* push fs */
4540
    case 0x1a8: /* push gs */
4541
        gen_op_movl_T0_seg((b >> 3) & 7);
4542
        gen_push_T0(s);
4543
        break;
4544
    case 0x07: /* pop es */
4545
    case 0x17: /* pop ss */
4546
    case 0x1f: /* pop ds */
4547
        if (CODE64(s))
4548
            goto illegal_op;
4549
        reg = b >> 3;
4550
        gen_pop_T0(s);
4551
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4552
        gen_pop_update(s);
4553
        if (reg == R_SS) {
4554
            /* if reg == SS, inhibit interrupts/trace. */
4555
            /* If several instructions disable interrupts, only the
4556
               _first_ does it */
4557
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4558
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
4559
            s->tf = 0;
4560
        }
4561
        if (s->is_jmp) {
4562
            gen_jmp_im(s->pc - s->cs_base);
4563
            gen_eob(s);
4564
        }
4565
        break;
4566
    case 0x1a1: /* pop fs */
4567
    case 0x1a9: /* pop gs */
4568
        gen_pop_T0(s);
4569
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4570
        gen_pop_update(s);
4571
        if (s->is_jmp) {
4572
            gen_jmp_im(s->pc - s->cs_base);
4573
            gen_eob(s);
4574
        }
4575
        break;
4576

    
4577
        /**************************/
4578
        /* mov */
4579
    case 0x88:
4580
    case 0x89: /* mov Gv, Ev */
4581
        if ((b & 1) == 0)
4582
            ot = OT_BYTE;
4583
        else
4584
            ot = dflag + OT_WORD;
4585
        modrm = ldub_code(s->pc++);
4586
        reg = ((modrm >> 3) & 7) | rex_r;
4587

    
4588
        /* generate a generic store */
4589
        gen_ldst_modrm(s, modrm, ot, reg, 1);
4590
        break;
4591
    case 0xc6:
4592
    case 0xc7: /* mov Ev, Iv */
4593
        if ((b & 1) == 0)
4594
            ot = OT_BYTE;
4595
        else
4596
            ot = dflag + OT_WORD;
4597
        modrm = ldub_code(s->pc++);
4598
        mod = (modrm >> 6) & 3;
4599
        if (mod != 3) {
4600
            s->rip_offset = insn_const_size(ot);
4601
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4602
        }
4603
        val = insn_get(s, ot);
4604
        gen_op_movl_T0_im(val);
4605
        if (mod != 3)
4606
            gen_op_st_T0_A0(ot + s->mem_index);
4607
        else
4608
            gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4609
        break;
4610
    case 0x8a:
4611
    case 0x8b: /* mov Ev, Gv */
4612
        if ((b & 1) == 0)
4613
            ot = OT_BYTE;
4614
        else
4615
            ot = OT_WORD + dflag;
4616
        modrm = ldub_code(s->pc++);
4617
        reg = ((modrm >> 3) & 7) | rex_r;
4618

    
4619
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4620
        gen_op_mov_reg_T0(ot, reg);
4621
        break;
4622
    case 0x8e: /* mov seg, Gv */
4623
        modrm = ldub_code(s->pc++);
4624
        reg = (modrm >> 3) & 7;
4625
        if (reg >= 6 || reg == R_CS)
4626
            goto illegal_op;
4627
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4628
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4629
        if (reg == R_SS) {
4630
            /* if reg == SS, inhibit interrupts/trace */
4631
            /* If several instructions disable interrupts, only the
4632
               _first_ does it */
4633
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4634
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
4635
            s->tf = 0;
4636
        }
4637
        if (s->is_jmp) {
4638
            gen_jmp_im(s->pc - s->cs_base);
4639
            gen_eob(s);
4640
        }
4641
        break;
4642
    case 0x8c: /* mov Gv, seg */
4643
        modrm = ldub_code(s->pc++);
4644
        reg = (modrm >> 3) & 7;
4645
        mod = (modrm >> 6) & 3;
4646
        if (reg >= 6)
4647
            goto illegal_op;
4648
        gen_op_movl_T0_seg(reg);
4649
        if (mod == 3)
4650
            ot = OT_WORD + dflag;
4651
        else
4652
            ot = OT_WORD;
4653
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4654
        break;
4655

    
4656
    case 0x1b6: /* movzbS Gv, Eb */
4657
    case 0x1b7: /* movzwS Gv, Eb */
4658
    case 0x1be: /* movsbS Gv, Eb */
4659
    case 0x1bf: /* movswS Gv, Eb */
4660
        {
4661
            int d_ot;
4662
            /* d_ot is the size of destination */
4663
            d_ot = dflag + OT_WORD;
4664
            /* ot is the size of source */
4665
            ot = (b & 1) + OT_BYTE;
4666
            modrm = ldub_code(s->pc++);
4667
            reg = ((modrm >> 3) & 7) | rex_r;
4668
            mod = (modrm >> 6) & 3;
4669
            rm = (modrm & 7) | REX_B(s);
4670

    
4671
            if (mod == 3) {
4672
                gen_op_mov_TN_reg(ot, 0, rm);
4673
                switch(ot | (b & 8)) {
4674
                case OT_BYTE:
4675
                    tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4676
                    break;
4677
                case OT_BYTE | 8:
4678
                    tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4679
                    break;
4680
                case OT_WORD:
4681
                    tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4682
                    break;
4683
                default:
4684
                case OT_WORD | 8:
4685
                    tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4686
                    break;
4687
                }
4688
                gen_op_mov_reg_T0(d_ot, reg);
4689
            } else {
4690
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4691
                if (b & 8) {
4692
                    gen_op_lds_T0_A0(ot + s->mem_index);
4693
                } else {
4694
                    gen_op_ldu_T0_A0(ot + s->mem_index);
4695
                }
4696
                gen_op_mov_reg_T0(d_ot, reg);
4697
            }
4698
        }
4699
        break;
4700

    
4701
    case 0x8d: /* lea */
4702
        ot = dflag + OT_WORD;
4703
        modrm = ldub_code(s->pc++);
4704
        mod = (modrm >> 6) & 3;
4705
        if (mod == 3)
4706
            goto illegal_op;
4707
        reg = ((modrm >> 3) & 7) | rex_r;
4708
        /* we must ensure that no segment is added */
4709
        s->override = -1;
4710
        val = s->addseg;
4711
        s->addseg = 0;
4712
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4713
        s->addseg = val;
4714
        gen_op_mov_reg_A0(ot - OT_WORD, reg);
4715
        break;
4716

    
4717
    case 0xa0: /* mov EAX, Ov */
4718
    case 0xa1:
4719
    case 0xa2: /* mov Ov, EAX */
4720
    case 0xa3:
4721
        {
4722
            target_ulong offset_addr;
4723

    
4724
            if ((b & 1) == 0)
4725
                ot = OT_BYTE;
4726
            else
4727
                ot = dflag + OT_WORD;
4728
#ifdef TARGET_X86_64
4729
            if (s->aflag == 2) {
4730
                offset_addr = ldq_code(s->pc);
4731
                s->pc += 8;
4732
                gen_op_movq_A0_im(offset_addr);
4733
            } else
4734
#endif
4735
            {
4736
                if (s->aflag) {
4737
                    offset_addr = insn_get(s, OT_LONG);
4738
                } else {
4739
                    offset_addr = insn_get(s, OT_WORD);
4740
                }
4741
                gen_op_movl_A0_im(offset_addr);
4742
            }
4743
            gen_add_A0_ds_seg(s);
4744
            if ((b & 2) == 0) {
4745
                gen_op_ld_T0_A0(ot + s->mem_index);
4746
                gen_op_mov_reg_T0(ot, R_EAX);
4747
            } else {
4748
                gen_op_mov_TN_reg(ot, 0, R_EAX);
4749
                gen_op_st_T0_A0(ot + s->mem_index);
4750
            }
4751
        }
4752
        break;
4753
    case 0xd7: /* xlat */
4754
#ifdef TARGET_X86_64
4755
        if (s->aflag == 2) {
4756
            gen_op_movq_A0_reg(R_EBX);
4757
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4758
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4759
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4760
        } else
4761
#endif
4762
        {
4763
            gen_op_movl_A0_reg(R_EBX);
4764
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4765
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4766
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4767
            if (s->aflag == 0)
4768
                gen_op_andl_A0_ffff();
4769
            else
4770
                tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
4771
        }
4772
        gen_add_A0_ds_seg(s);
4773
        gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4774
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4775
        break;
4776
    case 0xb0 ... 0xb7: /* mov R, Ib */
4777
        val = insn_get(s, OT_BYTE);
4778
        gen_op_movl_T0_im(val);
4779
        gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4780
        break;
4781
    case 0xb8 ... 0xbf: /* mov R, Iv */
4782
#ifdef TARGET_X86_64
4783
        if (dflag == 2) {
4784
            uint64_t tmp;
4785
            /* 64 bit case */
4786
            tmp = ldq_code(s->pc);
4787
            s->pc += 8;
4788
            reg = (b & 7) | REX_B(s);
4789
            gen_movtl_T0_im(tmp);
4790
            gen_op_mov_reg_T0(OT_QUAD, reg);
4791
        } else
4792
#endif
4793
        {
4794
            ot = dflag ? OT_LONG : OT_WORD;
4795
            val = insn_get(s, ot);
4796
            reg = (b & 7) | REX_B(s);
4797
            gen_op_movl_T0_im(val);
4798
            gen_op_mov_reg_T0(ot, reg);
4799
        }
4800
        break;
4801

    
4802
    case 0x91 ... 0x97: /* xchg R, EAX */
4803
        ot = dflag + OT_WORD;
4804
        reg = (b & 7) | REX_B(s);
4805
        rm = R_EAX;
4806
        goto do_xchg_reg;
4807
    case 0x86:
4808
    case 0x87: /* xchg Ev, Gv */
4809
        if ((b & 1) == 0)
4810
            ot = OT_BYTE;
4811
        else
4812
            ot = dflag + OT_WORD;
4813
        modrm = ldub_code(s->pc++);
4814
        reg = ((modrm >> 3) & 7) | rex_r;
4815
        mod = (modrm >> 6) & 3;
4816
        if (mod == 3) {
4817
            rm = (modrm & 7) | REX_B(s);
4818
        do_xchg_reg:
4819
            gen_op_mov_TN_reg(ot, 0, reg);
4820
            gen_op_mov_TN_reg(ot, 1, rm);
4821
            gen_op_mov_reg_T0(ot, rm);
4822
            gen_op_mov_reg_T1(ot, reg);
4823
        } else {
4824
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4825
            gen_op_mov_TN_reg(ot, 0, reg);
4826
            /* for xchg, lock is implicit */
4827
            if (!(prefixes & PREFIX_LOCK))
4828
                tcg_gen_helper_0_0(helper_lock);
4829
            gen_op_ld_T1_A0(ot + s->mem_index);
4830
            gen_op_st_T0_A0(ot + s->mem_index);
4831
            if (!(prefixes & PREFIX_LOCK))
4832
                tcg_gen_helper_0_0(helper_unlock);
4833
            gen_op_mov_reg_T1(ot, reg);
4834
        }
4835
        break;
4836
    case 0xc4: /* les Gv */
4837
        if (CODE64(s))
4838
            goto illegal_op;
4839
        op = R_ES;
4840
        goto do_lxx;
4841
    case 0xc5: /* lds Gv */
4842
        if (CODE64(s))
4843
            goto illegal_op;
4844
        op = R_DS;
4845
        goto do_lxx;
4846
    case 0x1b2: /* lss Gv */
4847
        op = R_SS;
4848
        goto do_lxx;
4849
    case 0x1b4: /* lfs Gv */
4850
        op = R_FS;
4851
        goto do_lxx;
4852
    case 0x1b5: /* lgs Gv */
4853
        op = R_GS;
4854
    do_lxx:
4855
        ot = dflag ? OT_LONG : OT_WORD;
4856
        modrm = ldub_code(s->pc++);
4857
        reg = ((modrm >> 3) & 7) | rex_r;
4858
        mod = (modrm >> 6) & 3;
4859
        if (mod == 3)
4860
            goto illegal_op;
4861
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4862
        gen_op_ld_T1_A0(ot + s->mem_index);
4863
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4864
        /* load the segment first to handle exceptions properly */
4865
        gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4866
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4867
        /* then put the data */
4868
        gen_op_mov_reg_T1(ot, reg);
4869
        if (s->is_jmp) {
4870
            gen_jmp_im(s->pc - s->cs_base);
4871
            gen_eob(s);
4872
        }
4873
        break;
4874

    
4875
        /************************/
4876
        /* shifts */
4877
    case 0xc0:
4878
    case 0xc1:
4879
        /* shift Ev,Ib */
4880
        shift = 2;
4881
    grp2:
4882
        {
4883
            if ((b & 1) == 0)
4884
                ot = OT_BYTE;
4885
            else
4886
                ot = dflag + OT_WORD;
4887

    
4888
            modrm = ldub_code(s->pc++);
4889
            mod = (modrm >> 6) & 3;
4890
            op = (modrm >> 3) & 7;
4891

    
4892
            if (mod != 3) {
4893
                if (shift == 2) {
4894
                    s->rip_offset = 1;
4895
                }
4896
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4897
                opreg = OR_TMP0;
4898
            } else {
4899
                opreg = (modrm & 7) | REX_B(s);
4900
            }
4901

    
4902
            /* simpler op */
4903
            if (shift == 0) {
4904
                gen_shift(s, op, ot, opreg, OR_ECX);
4905
            } else {
4906
                if (shift == 2) {
4907
                    shift = ldub_code(s->pc++);
4908
                }
4909
                gen_shifti(s, op, ot, opreg, shift);
4910
            }
4911
        }
4912
        break;
4913
    case 0xd0:
4914
    case 0xd1:
4915
        /* shift Ev,1 */
4916
        shift = 1;
4917
        goto grp2;
4918
    case 0xd2:
4919
    case 0xd3:
4920
        /* shift Ev,cl */
4921
        shift = 0;
4922
        goto grp2;
4923

    
4924
    case 0x1a4: /* shld imm */
4925
        op = 0;
4926
        shift = 1;
4927
        goto do_shiftd;
4928
    case 0x1a5: /* shld cl */
4929
        op = 0;
4930
        shift = 0;
4931
        goto do_shiftd;
4932
    case 0x1ac: /* shrd imm */
4933
        op = 1;
4934
        shift = 1;
4935
        goto do_shiftd;
4936
    case 0x1ad: /* shrd cl */
4937
        op = 1;
4938
        shift = 0;
4939
    do_shiftd:
4940
        ot = dflag + OT_WORD;
4941
        modrm = ldub_code(s->pc++);
4942
        mod = (modrm >> 6) & 3;
4943
        rm = (modrm & 7) | REX_B(s);
4944
        reg = ((modrm >> 3) & 7) | rex_r;
4945
        if (mod != 3) {
4946
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4947
            opreg = OR_TMP0;
4948
        } else {
4949
            opreg = rm;
4950
        }
4951
        gen_op_mov_TN_reg(ot, 1, reg);
4952

    
4953
        if (shift) {
4954
            val = ldub_code(s->pc++);
4955
            tcg_gen_movi_tl(cpu_T3, val);
4956
        } else {
4957
            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
4958
        }
4959
        gen_shiftd_rm_T1_T3(s, ot, opreg, op);
4960
        break;
4961

    
4962
        /************************/
4963
        /* floats */
4964
    case 0xd8 ... 0xdf:
4965
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4966
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4967
            /* XXX: what to do if illegal op ? */
4968
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4969
            break;
4970
        }
4971
        modrm = ldub_code(s->pc++);
4972
        mod = (modrm >> 6) & 3;
4973
        rm = modrm & 7;
4974
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4975
        if (mod != 3) {
4976
            /* memory op */
4977
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4978
            switch(op) {
4979
            case 0x00 ... 0x07: /* fxxxs */
4980
            case 0x10 ... 0x17: /* fixxxl */
4981
            case 0x20 ... 0x27: /* fxxxl */
4982
            case 0x30 ... 0x37: /* fixxx */
4983
                {
4984
                    int op1;
4985
                    op1 = op & 7;
4986

    
4987
                    switch(op >> 4) {
4988
                    case 0:
4989
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4990
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4991
                        tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
4992
                        break;
4993
                    case 1:
4994
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4995
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4996
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4997
                        break;
4998
                    case 2:
4999
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
5000
                                          (s->mem_index >> 2) - 1);
5001
                        tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
5002
                        break;
5003
                    case 3:
5004
                    default:
5005
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
5006
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5007
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
5008
                        break;
5009
                    }
5010

    
5011
                    tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
5012
                    if (op1 == 3) {
5013
                        /* fcomp needs pop */
5014
                        tcg_gen_helper_0_0(helper_fpop);
5015
                    }
5016
                }
5017
                break;
5018
            case 0x08: /* flds */
5019
            case 0x0a: /* fsts */
5020
            case 0x0b: /* fstps */
5021
            case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
5022
            case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
5023
            case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
5024
                switch(op & 7) {
5025
                case 0:
5026
                    switch(op >> 4) {
5027
                    case 0:
5028
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5029
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5030
                        tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
5031
                        break;
5032
                    case 1:
5033
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5034
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5035
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
5036
                        break;
5037
                    case 2:
5038
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
5039
                                          (s->mem_index >> 2) - 1);
5040
                        tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
5041
                        break;
5042
                    case 3:
5043
                    default:
5044
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
5045
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5046
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
5047
                        break;
5048
                    }
5049
                    break;
5050
                case 1:
5051
                    /* XXX: the corresponding CPUID bit must be tested ! */
5052
                    switch(op >> 4) {
5053
                    case 1:
5054
                        tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
5055
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5056
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
5057
                        break;
5058
                    case 2:
5059
                        tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
5060
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
5061
                                          (s->mem_index >> 2) - 1);
5062
                        break;
5063
                    case 3:
5064
                    default:
5065
                        tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
5066
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5067
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
5068
                        break;
5069
                    }
5070
                    tcg_gen_helper_0_0(helper_fpop);
5071
                    break;
5072
                default:
5073
                    switch(op >> 4) {
5074
                    case 0:
5075
                        tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
5076
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5077
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
5078
                        break;
5079
                    case 1:
5080
                        tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
5081
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5082
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
5083
                        break;
5084
                    case 2:
5085
                        tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
5086
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
5087
                                          (s->mem_index >> 2) - 1);
5088
                        break;
5089
                    case 3:
5090
                    default:
5091
                        tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
5092
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5093
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
5094
                        break;
5095
                    }
5096
                    if ((op & 7) == 3)
5097
                        tcg_gen_helper_0_0(helper_fpop);
5098
                    break;
5099
                }
5100
                break;
5101
            case 0x0c: /* fldenv mem */
5102
                if (s->cc_op != CC_OP_DYNAMIC)
5103
                    gen_op_set_cc_op(s->cc_op);
5104
                gen_jmp_im(pc_start - s->cs_base);
5105
                tcg_gen_helper_0_2(helper_fldenv, 
5106
                                   cpu_A0, tcg_const_i32(s->dflag));
5107
                break;
5108
            case 0x0d: /* fldcw mem */
5109
                gen_op_ld_T0_A0(OT_WORD + s->mem_index);
5110
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5111
                tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
5112
                break;
5113
            case 0x0e: /* fnstenv mem */
5114
                if (s->cc_op != CC_OP_DYNAMIC)
5115
                    gen_op_set_cc_op(s->cc_op);
5116
                gen_jmp_im(pc_start - s->cs_base);
5117
                tcg_gen_helper_0_2(helper_fstenv,
5118
                                   cpu_A0, tcg_const_i32(s->dflag));
5119
                break;
5120
            case 0x0f: /* fnstcw mem */
5121
                tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
5122
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5123
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
5124
                break;
5125
            case 0x1d: /* fldt mem */
5126
                if (s->cc_op != CC_OP_DYNAMIC)
5127
                    gen_op_set_cc_op(s->cc_op);
5128
                gen_jmp_im(pc_start - s->cs_base);
5129
                tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
5130
                break;
5131
            case 0x1f: /* fstpt mem */
5132
                if (s->cc_op != CC_OP_DYNAMIC)
5133
                    gen_op_set_cc_op(s->cc_op);
5134
                gen_jmp_im(pc_start - s->cs_base);
5135
                tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
5136
                tcg_gen_helper_0_0(helper_fpop);
5137
                break;
5138
            case 0x2c: /* frstor mem */
5139
                if (s->cc_op != CC_OP_DYNAMIC)
5140
                    gen_op_set_cc_op(s->cc_op);
5141
                gen_jmp_im(pc_start - s->cs_base);
5142
                tcg_gen_helper_0_2(helper_frstor,
5143
                                   cpu_A0, tcg_const_i32(s->dflag));
5144
                break;
5145
            case 0x2e: /* fnsave mem */
5146
                if (s->cc_op != CC_OP_DYNAMIC)
5147
                    gen_op_set_cc_op(s->cc_op);
5148
                gen_jmp_im(pc_start - s->cs_base);
5149
                tcg_gen_helper_0_2(helper_fsave,
5150
                                   cpu_A0, tcg_const_i32(s->dflag));
5151
                break;
5152
            case 0x2f: /* fnstsw mem */
5153
                tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5154
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5155
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
5156
                break;
5157
            case 0x3c: /* fbld */
5158
                if (s->cc_op != CC_OP_DYNAMIC)
5159
                    gen_op_set_cc_op(s->cc_op);
5160
                gen_jmp_im(pc_start - s->cs_base);
5161
                tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
5162
                break;
5163
            case 0x3e: /* fbstp */
5164
                if (s->cc_op != CC_OP_DYNAMIC)
5165
                    gen_op_set_cc_op(s->cc_op);
5166
                gen_jmp_im(pc_start - s->cs_base);
5167
                tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
5168
                tcg_gen_helper_0_0(helper_fpop);
5169
                break;
5170
            case 0x3d: /* fildll */
5171
                tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
5172
                                  (s->mem_index >> 2) - 1);
5173
                tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
5174
                break;
5175
            case 0x3f: /* fistpll */
5176
                tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
5177
                tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
5178
                                  (s->mem_index >> 2) - 1);
5179
                tcg_gen_helper_0_0(helper_fpop);
5180
                break;
5181
            default:
5182
                goto illegal_op;
5183
            }
5184
        } else {
5185
            /* register float ops */
5186
            opreg = rm;
5187

    
5188
            switch(op) {
5189
            case 0x08: /* fld sti */
5190
                tcg_gen_helper_0_0(helper_fpush);
5191
                tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
5192
                break;
5193
            case 0x09: /* fxchg sti */
5194
            case 0x29: /* fxchg4 sti, undocumented op */
5195
            case 0x39: /* fxchg7 sti, undocumented op */
5196
                tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
5197
                break;
5198
            case 0x0a: /* grp d9/2 */
5199
                switch(rm) {
5200
                case 0: /* fnop */
5201
                    /* check exceptions (FreeBSD FPU probe) */
5202
                    if (s->cc_op != CC_OP_DYNAMIC)
5203
                        gen_op_set_cc_op(s->cc_op);
5204
                    gen_jmp_im(pc_start - s->cs_base);
5205
                    tcg_gen_helper_0_0(helper_fwait);
5206
                    break;
5207
                default:
5208
                    goto illegal_op;
5209
                }
5210
                break;
5211
            case 0x0c: /* grp d9/4 */
5212
                switch(rm) {
5213
                case 0: /* fchs */
5214
                    tcg_gen_helper_0_0(helper_fchs_ST0);
5215
                    break;
5216
                case 1: /* fabs */
5217
                    tcg_gen_helper_0_0(helper_fabs_ST0);
5218
                    break;
5219
                case 4: /* ftst */
5220
                    tcg_gen_helper_0_0(helper_fldz_FT0);
5221
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5222
                    break;
5223
                case 5: /* fxam */
5224
                    tcg_gen_helper_0_0(helper_fxam_ST0);
5225
                    break;
5226
                default:
5227
                    goto illegal_op;
5228
                }
5229
                break;
5230
            case 0x0d: /* grp d9/5 */
5231
                {
5232
                    switch(rm) {
5233
                    case 0:
5234
                        tcg_gen_helper_0_0(helper_fpush);
5235
                        tcg_gen_helper_0_0(helper_fld1_ST0);
5236
                        break;
5237
                    case 1:
5238
                        tcg_gen_helper_0_0(helper_fpush);
5239
                        tcg_gen_helper_0_0(helper_fldl2t_ST0);
5240
                        break;
5241
                    case 2:
5242
                        tcg_gen_helper_0_0(helper_fpush);
5243
                        tcg_gen_helper_0_0(helper_fldl2e_ST0);
5244
                        break;
5245
                    case 3:
5246
                        tcg_gen_helper_0_0(helper_fpush);
5247
                        tcg_gen_helper_0_0(helper_fldpi_ST0);
5248
                        break;
5249
                    case 4:
5250
                        tcg_gen_helper_0_0(helper_fpush);
5251
                        tcg_gen_helper_0_0(helper_fldlg2_ST0);
5252
                        break;
5253
                    case 5:
5254
                        tcg_gen_helper_0_0(helper_fpush);
5255
                        tcg_gen_helper_0_0(helper_fldln2_ST0);
5256
                        break;
5257
                    case 6:
5258
                        tcg_gen_helper_0_0(helper_fpush);
5259
                        tcg_gen_helper_0_0(helper_fldz_ST0);
5260
                        break;
5261
                    default:
5262
                        goto illegal_op;
5263
                    }
5264
                }
5265
                break;
5266
            case 0x0e: /* grp d9/6 */
5267
                switch(rm) {
5268
                case 0: /* f2xm1 */
5269
                    tcg_gen_helper_0_0(helper_f2xm1);
5270
                    break;
5271
                case 1: /* fyl2x */
5272
                    tcg_gen_helper_0_0(helper_fyl2x);
5273
                    break;
5274
                case 2: /* fptan */
5275
                    tcg_gen_helper_0_0(helper_fptan);
5276
                    break;
5277
                case 3: /* fpatan */
5278
                    tcg_gen_helper_0_0(helper_fpatan);
5279
                    break;
5280
                case 4: /* fxtract */
5281
                    tcg_gen_helper_0_0(helper_fxtract);
5282
                    break;
5283
                case 5: /* fprem1 */
5284
                    tcg_gen_helper_0_0(helper_fprem1);
5285
                    break;
5286
                case 6: /* fdecstp */
5287
                    tcg_gen_helper_0_0(helper_fdecstp);
5288
                    break;
5289
                default:
5290
                case 7: /* fincstp */
5291
                    tcg_gen_helper_0_0(helper_fincstp);
5292
                    break;
5293
                }
5294
                break;
5295
            case 0x0f: /* grp d9/7 */
5296
                switch(rm) {
5297
                case 0: /* fprem */
5298
                    tcg_gen_helper_0_0(helper_fprem);
5299
                    break;
5300
                case 1: /* fyl2xp1 */
5301
                    tcg_gen_helper_0_0(helper_fyl2xp1);
5302
                    break;
5303
                case 2: /* fsqrt */
5304
                    tcg_gen_helper_0_0(helper_fsqrt);
5305
                    break;
5306
                case 3: /* fsincos */
5307
                    tcg_gen_helper_0_0(helper_fsincos);
5308
                    break;
5309
                case 5: /* fscale */
5310
                    tcg_gen_helper_0_0(helper_fscale);
5311
                    break;
5312
                case 4: /* frndint */
5313
                    tcg_gen_helper_0_0(helper_frndint);
5314
                    break;
5315
                case 6: /* fsin */
5316
                    tcg_gen_helper_0_0(helper_fsin);
5317
                    break;
5318
                default:
5319
                case 7: /* fcos */
5320
                    tcg_gen_helper_0_0(helper_fcos);
5321
                    break;
5322
                }
5323
                break;
5324
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
5325
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
5326
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
5327
                {
5328
                    int op1;
5329

    
5330
                    op1 = op & 7;
5331
                    if (op >= 0x20) {
5332
                        tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
5333
                        if (op >= 0x30)
5334
                            tcg_gen_helper_0_0(helper_fpop);
5335
                    } else {
5336
                        tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5337
                        tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
5338
                    }
5339
                }
5340
                break;
5341
            case 0x02: /* fcom */
5342
            case 0x22: /* fcom2, undocumented op */
5343
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5344
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5345
                break;
5346
            case 0x03: /* fcomp */
5347
            case 0x23: /* fcomp3, undocumented op */
5348
            case 0x32: /* fcomp5, undocumented op */
5349
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5350
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5351
                tcg_gen_helper_0_0(helper_fpop);
5352
                break;
5353
            case 0x15: /* da/5 */
5354
                switch(rm) {
5355
                case 1: /* fucompp */
5356
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5357
                    tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5358
                    tcg_gen_helper_0_0(helper_fpop);
5359
                    tcg_gen_helper_0_0(helper_fpop);
5360
                    break;
5361
                default:
5362
                    goto illegal_op;
5363
                }
5364
                break;
5365
            case 0x1c:
5366
                switch(rm) {
5367
                case 0: /* feni (287 only, just do nop here) */
5368
                    break;
5369
                case 1: /* fdisi (287 only, just do nop here) */
5370
                    break;
5371
                case 2: /* fclex */
5372
                    tcg_gen_helper_0_0(helper_fclex);
5373
                    break;
5374
                case 3: /* fninit */
5375
                    tcg_gen_helper_0_0(helper_fninit);
5376
                    break;
5377
                case 4: /* fsetpm (287 only, just do nop here) */
5378
                    break;
5379
                default:
5380
                    goto illegal_op;
5381
                }
5382
                break;
5383
            case 0x1d: /* fucomi */
5384
                if (s->cc_op != CC_OP_DYNAMIC)
5385
                    gen_op_set_cc_op(s->cc_op);
5386
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5387
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5388
                s->cc_op = CC_OP_EFLAGS;
5389
                break;
5390
            case 0x1e: /* fcomi */
5391
                if (s->cc_op != CC_OP_DYNAMIC)
5392
                    gen_op_set_cc_op(s->cc_op);
5393
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5394
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5395
                s->cc_op = CC_OP_EFLAGS;
5396
                break;
5397
            case 0x28: /* ffree sti */
5398
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5399
                break;
5400
            case 0x2a: /* fst sti */
5401
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5402
                break;
5403
            case 0x2b: /* fstp sti */
5404
            case 0x0b: /* fstp1 sti, undocumented op */
5405
            case 0x3a: /* fstp8 sti, undocumented op */
5406
            case 0x3b: /* fstp9 sti, undocumented op */
5407
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5408
                tcg_gen_helper_0_0(helper_fpop);
5409
                break;
5410
            case 0x2c: /* fucom st(i) */
5411
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5412
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5413
                break;
5414
            case 0x2d: /* fucomp st(i) */
5415
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5416
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5417
                tcg_gen_helper_0_0(helper_fpop);
5418
                break;
5419
            case 0x33: /* de/3 */
5420
                switch(rm) {
5421
                case 1: /* fcompp */
5422
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5423
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5424
                    tcg_gen_helper_0_0(helper_fpop);
5425
                    tcg_gen_helper_0_0(helper_fpop);
5426
                    break;
5427
                default:
5428
                    goto illegal_op;
5429
                }
5430
                break;
5431
            case 0x38: /* ffreep sti, undocumented op */
5432
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5433
                tcg_gen_helper_0_0(helper_fpop);
5434
                break;
5435
            case 0x3c: /* df/4 */
5436
                switch(rm) {
5437
                case 0:
5438
                    tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5439
                    tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5440
                    gen_op_mov_reg_T0(OT_WORD, R_EAX);
5441
                    break;
5442
                default:
5443
                    goto illegal_op;
5444
                }
5445
                break;
5446
            case 0x3d: /* fucomip */
5447
                if (s->cc_op != CC_OP_DYNAMIC)
5448
                    gen_op_set_cc_op(s->cc_op);
5449
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5450
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5451
                tcg_gen_helper_0_0(helper_fpop);
5452
                s->cc_op = CC_OP_EFLAGS;
5453
                break;
5454
            case 0x3e: /* fcomip */
5455
                if (s->cc_op != CC_OP_DYNAMIC)
5456
                    gen_op_set_cc_op(s->cc_op);
5457
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5458
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5459
                tcg_gen_helper_0_0(helper_fpop);
5460
                s->cc_op = CC_OP_EFLAGS;
5461
                break;
5462
            case 0x10 ... 0x13: /* fcmovxx */
5463
            case 0x18 ... 0x1b:
5464
                {
5465
                    int op1, l1;
5466
                    const static uint8_t fcmov_cc[8] = {
5467
                        (JCC_B << 1),
5468
                        (JCC_Z << 1),
5469
                        (JCC_BE << 1),
5470
                        (JCC_P << 1),
5471
                    };
5472
                    op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
5473
                    l1 = gen_new_label();
5474
                    gen_jcc1(s, s->cc_op, op1, l1);
5475
                    tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
5476
                    gen_set_label(l1);
5477
                }
5478
                break;
5479
            default:
5480
                goto illegal_op;
5481
            }
5482
        }
5483
        break;
5484
        /************************/
5485
        /* string ops */
5486

    
5487
    case 0xa4: /* movsS */
5488
    case 0xa5:
5489
        if ((b & 1) == 0)
5490
            ot = OT_BYTE;
5491
        else
5492
            ot = dflag + OT_WORD;
5493

    
5494
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5495
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5496
        } else {
5497
            gen_movs(s, ot);
5498
        }
5499
        break;
5500

    
5501
    case 0xaa: /* stosS */
5502
    case 0xab:
5503
        if ((b & 1) == 0)
5504
            ot = OT_BYTE;
5505
        else
5506
            ot = dflag + OT_WORD;
5507

    
5508
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5509
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5510
        } else {
5511
            gen_stos(s, ot);
5512
        }
5513
        break;
5514
    case 0xac: /* lodsS */
5515
    case 0xad:
5516
        if ((b & 1) == 0)
5517
            ot = OT_BYTE;
5518
        else
5519
            ot = dflag + OT_WORD;
5520
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5521
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5522
        } else {
5523
            gen_lods(s, ot);
5524
        }
5525
        break;
5526
    case 0xae: /* scasS */
5527
    case 0xaf:
5528
        if ((b & 1) == 0)
5529
            ot = OT_BYTE;
5530
        else
5531
            ot = dflag + OT_WORD;
5532
        if (prefixes & PREFIX_REPNZ) {
5533
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5534
        } else if (prefixes & PREFIX_REPZ) {
5535
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5536
        } else {
5537
            gen_scas(s, ot);
5538
            s->cc_op = CC_OP_SUBB + ot;
5539
        }
5540
        break;
5541

    
5542
    case 0xa6: /* cmpsS */
5543
    case 0xa7:
5544
        if ((b & 1) == 0)
5545
            ot = OT_BYTE;
5546
        else
5547
            ot = dflag + OT_WORD;
5548
        if (prefixes & PREFIX_REPNZ) {
5549
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5550
        } else if (prefixes & PREFIX_REPZ) {
5551
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5552
        } else {
5553
            gen_cmps(s, ot);
5554
            s->cc_op = CC_OP_SUBB + ot;
5555
        }
5556
        break;
5557
    case 0x6c: /* insS */
5558
    case 0x6d:
5559
        if ((b & 1) == 0)
5560
            ot = OT_BYTE;
5561
        else
5562
            ot = dflag ? OT_LONG : OT_WORD;
5563
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5564
        gen_op_andl_T0_ffff();
5565
        gen_check_io(s, ot, pc_start - s->cs_base, 
5566
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
5567
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5568
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5569
        } else {
5570
            gen_ins(s, ot);
5571
        }
5572
        break;
5573
    case 0x6e: /* outsS */
5574
    case 0x6f:
5575
        if ((b & 1) == 0)
5576
            ot = OT_BYTE;
5577
        else
5578
            ot = dflag ? OT_LONG : OT_WORD;
5579
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5580
        gen_op_andl_T0_ffff();
5581
        gen_check_io(s, ot, pc_start - s->cs_base,
5582
                     svm_is_rep(prefixes) | 4);
5583
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5584
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5585
        } else {
5586
            gen_outs(s, ot);
5587
        }
5588
        break;
5589

    
5590
        /************************/
5591
        /* port I/O */
5592

    
5593
    case 0xe4:
5594
    case 0xe5:
5595
        if ((b & 1) == 0)
5596
            ot = OT_BYTE;
5597
        else
5598
            ot = dflag ? OT_LONG : OT_WORD;
5599
        val = ldub_code(s->pc++);
5600
        gen_op_movl_T0_im(val);
5601
        gen_check_io(s, ot, pc_start - s->cs_base,
5602
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5603
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5604
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5605
        gen_op_mov_reg_T1(ot, R_EAX);
5606
        break;
5607
    case 0xe6:
5608
    case 0xe7:
5609
        if ((b & 1) == 0)
5610
            ot = OT_BYTE;
5611
        else
5612
            ot = dflag ? OT_LONG : OT_WORD;
5613
        val = ldub_code(s->pc++);
5614
        gen_op_movl_T0_im(val);
5615
        gen_check_io(s, ot, pc_start - s->cs_base,
5616
                     svm_is_rep(prefixes));
5617
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5618

    
5619
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5620
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5621
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5622
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5623
        break;
5624
    case 0xec:
5625
    case 0xed:
5626
        if ((b & 1) == 0)
5627
            ot = OT_BYTE;
5628
        else
5629
            ot = dflag ? OT_LONG : OT_WORD;
5630
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5631
        gen_op_andl_T0_ffff();
5632
        gen_check_io(s, ot, pc_start - s->cs_base,
5633
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5634
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5635
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5636
        gen_op_mov_reg_T1(ot, R_EAX);
5637
        break;
5638
    case 0xee:
5639
    case 0xef:
5640
        if ((b & 1) == 0)
5641
            ot = OT_BYTE;
5642
        else
5643
            ot = dflag ? OT_LONG : OT_WORD;
5644
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5645
        gen_op_andl_T0_ffff();
5646
        gen_check_io(s, ot, pc_start - s->cs_base,
5647
                     svm_is_rep(prefixes));
5648
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5649

    
5650
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5651
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5652
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5653
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5654
        break;
5655

    
5656
        /************************/
5657
        /* control */
5658
    case 0xc2: /* ret im */
5659
        val = ldsw_code(s->pc);
5660
        s->pc += 2;
5661
        gen_pop_T0(s);
5662
        if (CODE64(s) && s->dflag)
5663
            s->dflag = 2;
5664
        gen_stack_update(s, val + (2 << s->dflag));
5665
        if (s->dflag == 0)
5666
            gen_op_andl_T0_ffff();
5667
        gen_op_jmp_T0();
5668
        gen_eob(s);
5669
        break;
5670
    case 0xc3: /* ret */
5671
        gen_pop_T0(s);
5672
        gen_pop_update(s);
5673
        if (s->dflag == 0)
5674
            gen_op_andl_T0_ffff();
5675
        gen_op_jmp_T0();
5676
        gen_eob(s);
5677
        break;
5678
    case 0xca: /* lret im */
5679
        val = ldsw_code(s->pc);
5680
        s->pc += 2;
5681
    do_lret:
5682
        if (s->pe && !s->vm86) {
5683
            if (s->cc_op != CC_OP_DYNAMIC)
5684
                gen_op_set_cc_op(s->cc_op);
5685
            gen_jmp_im(pc_start - s->cs_base);
5686
            tcg_gen_helper_0_2(helper_lret_protected,
5687
                               tcg_const_i32(s->dflag), 
5688
                               tcg_const_i32(val));
5689
        } else {
5690
            gen_stack_A0(s);
5691
            /* pop offset */
5692
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5693
            if (s->dflag == 0)
5694
                gen_op_andl_T0_ffff();
5695
            /* NOTE: keeping EIP updated is not a problem in case of
5696
               exception */
5697
            gen_op_jmp_T0();
5698
            /* pop selector */
5699
            gen_op_addl_A0_im(2 << s->dflag);
5700
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5701
            gen_op_movl_seg_T0_vm(R_CS);
5702
            /* add stack offset */
5703
            gen_stack_update(s, val + (4 << s->dflag));
5704
        }
5705
        gen_eob(s);
5706
        break;
5707
    case 0xcb: /* lret */
5708
        val = 0;
5709
        goto do_lret;
5710
    case 0xcf: /* iret */
5711
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
5712
        if (!s->pe) {
5713
            /* real mode */
5714
            tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5715
            s->cc_op = CC_OP_EFLAGS;
5716
        } else if (s->vm86) {
5717
            if (s->iopl != 3) {
5718
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5719
            } else {
5720
                tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5721
                s->cc_op = CC_OP_EFLAGS;
5722
            }
5723
        } else {
5724
            if (s->cc_op != CC_OP_DYNAMIC)
5725
                gen_op_set_cc_op(s->cc_op);
5726
            gen_jmp_im(pc_start - s->cs_base);
5727
            tcg_gen_helper_0_2(helper_iret_protected,
5728
                               tcg_const_i32(s->dflag), 
5729
                               tcg_const_i32(s->pc - s->cs_base));
5730
            s->cc_op = CC_OP_EFLAGS;
5731
        }
5732
        gen_eob(s);
5733
        break;
5734
    case 0xe8: /* call im */
5735
        {
5736
            if (dflag)
5737
                tval = (int32_t)insn_get(s, OT_LONG);
5738
            else
5739
                tval = (int16_t)insn_get(s, OT_WORD);
5740
            next_eip = s->pc - s->cs_base;
5741
            tval += next_eip;
5742
            if (s->dflag == 0)
5743
                tval &= 0xffff;
5744
            gen_movtl_T0_im(next_eip);
5745
            gen_push_T0(s);
5746
            gen_jmp(s, tval);
5747
        }
5748
        break;
5749
    case 0x9a: /* lcall im */
5750
        {
5751
            unsigned int selector, offset;
5752

    
5753
            if (CODE64(s))
5754
                goto illegal_op;
5755
            ot = dflag ? OT_LONG : OT_WORD;
5756
            offset = insn_get(s, ot);
5757
            selector = insn_get(s, OT_WORD);
5758

    
5759
            gen_op_movl_T0_im(selector);
5760
            gen_op_movl_T1_imu(offset);
5761
        }
5762
        goto do_lcall;
5763
    case 0xe9: /* jmp im */
5764
        if (dflag)
5765
            tval = (int32_t)insn_get(s, OT_LONG);
5766
        else
5767
            tval = (int16_t)insn_get(s, OT_WORD);
5768
        tval += s->pc - s->cs_base;
5769
        if (s->dflag == 0)
5770
            tval &= 0xffff;
5771
        gen_jmp(s, tval);
5772
        break;
5773
    case 0xea: /* ljmp im */
5774
        {
5775
            unsigned int selector, offset;
5776

    
5777
            if (CODE64(s))
5778
                goto illegal_op;
5779
            ot = dflag ? OT_LONG : OT_WORD;
5780
            offset = insn_get(s, ot);
5781
            selector = insn_get(s, OT_WORD);
5782

    
5783
            gen_op_movl_T0_im(selector);
5784
            gen_op_movl_T1_imu(offset);
5785
        }
5786
        goto do_ljmp;
5787
    case 0xeb: /* jmp Jb */
5788
        tval = (int8_t)insn_get(s, OT_BYTE);
5789
        tval += s->pc - s->cs_base;
5790
        if (s->dflag == 0)
5791
            tval &= 0xffff;
5792
        gen_jmp(s, tval);
5793
        break;
5794
    case 0x70 ... 0x7f: /* jcc Jb */
5795
        tval = (int8_t)insn_get(s, OT_BYTE);
5796
        goto do_jcc;
5797
    case 0x180 ... 0x18f: /* jcc Jv */
5798
        if (dflag) {
5799
            tval = (int32_t)insn_get(s, OT_LONG);
5800
        } else {
5801
            tval = (int16_t)insn_get(s, OT_WORD);
5802
        }
5803
    do_jcc:
5804
        next_eip = s->pc - s->cs_base;
5805
        tval += next_eip;
5806
        if (s->dflag == 0)
5807
            tval &= 0xffff;
5808
        gen_jcc(s, b, tval, next_eip);
5809
        break;
5810

    
5811
    case 0x190 ... 0x19f: /* setcc Gv */
5812
        modrm = ldub_code(s->pc++);
5813
        gen_setcc(s, b);
5814
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5815
        break;
5816
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
5817
        {
5818
            int l1;
5819
            TCGv t0;
5820

    
5821
            ot = dflag + OT_WORD;
5822
            modrm = ldub_code(s->pc++);
5823
            reg = ((modrm >> 3) & 7) | rex_r;
5824
            mod = (modrm >> 6) & 3;
5825
            t0 = tcg_temp_local_new(TCG_TYPE_TL);
5826
            if (mod != 3) {
5827
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5828
                gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
5829
            } else {
5830
                rm = (modrm & 7) | REX_B(s);
5831
                gen_op_mov_v_reg(ot, t0, rm);
5832
            }
5833
#ifdef TARGET_X86_64
5834
            if (ot == OT_LONG) {
5835
                /* XXX: specific Intel behaviour ? */
5836
                l1 = gen_new_label();
5837
                gen_jcc1(s, s->cc_op, b ^ 1, l1);
5838
                tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
5839
                gen_set_label(l1);
5840
                tcg_gen_movi_tl(cpu_tmp0, 0);
5841
                tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
5842
            } else
5843
#endif
5844
            {
5845
                l1 = gen_new_label();
5846
                gen_jcc1(s, s->cc_op, b ^ 1, l1);
5847
                gen_op_mov_reg_v(ot, reg, t0);
5848
                gen_set_label(l1);
5849
            }
5850
            tcg_temp_free(t0);
5851
        }
5852
        break;
5853

    
5854
        /************************/
5855
        /* flags */
5856
    case 0x9c: /* pushf */
5857
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
5858
        if (s->vm86 && s->iopl != 3) {
5859
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5860
        } else {
5861
            if (s->cc_op != CC_OP_DYNAMIC)
5862
                gen_op_set_cc_op(s->cc_op);
5863
            tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
5864
            gen_push_T0(s);
5865
        }
5866
        break;
5867
    case 0x9d: /* popf */
5868
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
5869
        if (s->vm86 && s->iopl != 3) {
5870
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5871
        } else {
5872
            gen_pop_T0(s);
5873
            if (s->cpl == 0) {
5874
                if (s->dflag) {
5875
                    tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5876
                                       tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
5877
                } else {
5878
                    tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5879
                                       tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
5880
                }
5881
            } else {
5882
                if (s->cpl <= s->iopl) {
5883
                    if (s->dflag) {
5884
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5885
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
5886
                    } else {
5887
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5888
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
5889
                    }
5890
                } else {
5891
                    if (s->dflag) {
5892
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5893
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
5894
                    } else {
5895
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5896
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
5897
                    }
5898
                }
5899
            }
5900
            gen_pop_update(s);
5901
            s->cc_op = CC_OP_EFLAGS;
5902
            /* abort translation because TF flag may change */
5903
            gen_jmp_im(s->pc - s->cs_base);
5904
            gen_eob(s);
5905
        }
5906
        break;
5907
    case 0x9e: /* sahf */
5908
        if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
5909
            goto illegal_op;
5910
        gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5911
        if (s->cc_op != CC_OP_DYNAMIC)
5912
            gen_op_set_cc_op(s->cc_op);
5913
        gen_compute_eflags(cpu_cc_src);
5914
        tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
5915
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
5916
        tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
5917
        s->cc_op = CC_OP_EFLAGS;
5918
        break;
5919
    case 0x9f: /* lahf */
5920
        if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
5921
            goto illegal_op;
5922
        if (s->cc_op != CC_OP_DYNAMIC)
5923
            gen_op_set_cc_op(s->cc_op);
5924
        gen_compute_eflags(cpu_T[0]);
5925
        /* Note: gen_compute_eflags() only gives the condition codes */
5926
        tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
5927
        gen_op_mov_reg_T0(OT_BYTE, R_AH);
5928
        break;
5929
    case 0xf5: /* cmc */
5930
        if (s->cc_op != CC_OP_DYNAMIC)
5931
            gen_op_set_cc_op(s->cc_op);
5932
        gen_compute_eflags(cpu_cc_src);
5933
        tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
5934
        s->cc_op = CC_OP_EFLAGS;
5935
        break;
5936
    case 0xf8: /* clc */
5937
        if (s->cc_op != CC_OP_DYNAMIC)
5938
            gen_op_set_cc_op(s->cc_op);
5939
        gen_compute_eflags(cpu_cc_src);
5940
        tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
5941
        s->cc_op = CC_OP_EFLAGS;
5942
        break;
5943
    case 0xf9: /* stc */
5944
        if (s->cc_op != CC_OP_DYNAMIC)
5945
            gen_op_set_cc_op(s->cc_op);
5946
        gen_compute_eflags(cpu_cc_src);
5947
        tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
5948
        s->cc_op = CC_OP_EFLAGS;
5949
        break;
5950
    case 0xfc: /* cld */
5951
        tcg_gen_movi_i32(cpu_tmp2_i32, 1);
5952
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5953
        break;
5954
    case 0xfd: /* std */
5955
        tcg_gen_movi_i32(cpu_tmp2_i32, -1);
5956
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5957
        break;
5958

    
5959
        /************************/
5960
        /* bit operations */
5961
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
5962
        ot = dflag + OT_WORD;
5963
        modrm = ldub_code(s->pc++);
5964
        op = (modrm >> 3) & 7;
5965
        mod = (modrm >> 6) & 3;
5966
        rm = (modrm & 7) | REX_B(s);
5967
        if (mod != 3) {
5968
            s->rip_offset = 1;
5969
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5970
            gen_op_ld_T0_A0(ot + s->mem_index);
5971
        } else {
5972
            gen_op_mov_TN_reg(ot, 0, rm);
5973
        }
5974
        /* load shift */
5975
        val = ldub_code(s->pc++);
5976
        gen_op_movl_T1_im(val);
5977
        if (op < 4)
5978
            goto illegal_op;
5979
        op -= 4;
5980
        goto bt_op;
5981
    case 0x1a3: /* bt Gv, Ev */
5982
        op = 0;
5983
        goto do_btx;
5984
    case 0x1ab: /* bts */
5985
        op = 1;
5986
        goto do_btx;
5987
    case 0x1b3: /* btr */
5988
        op = 2;
5989
        goto do_btx;
5990
    case 0x1bb: /* btc */
5991
        op = 3;
5992
    do_btx:
5993
        ot = dflag + OT_WORD;
5994
        modrm = ldub_code(s->pc++);
5995
        reg = ((modrm >> 3) & 7) | rex_r;
5996
        mod = (modrm >> 6) & 3;
5997
        rm = (modrm & 7) | REX_B(s);
5998
        gen_op_mov_TN_reg(OT_LONG, 1, reg);
5999
        if (mod != 3) {
6000
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6001
            /* specific case: we need to add a displacement */
6002
            gen_exts(ot, cpu_T[1]);
6003
            tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
6004
            tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
6005
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
6006
            gen_op_ld_T0_A0(ot + s->mem_index);
6007
        } else {
6008
            gen_op_mov_TN_reg(ot, 0, rm);
6009
        }
6010
    bt_op:
6011
        tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
6012
        switch(op) {
6013
        case 0:
6014
            tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
6015
            tcg_gen_movi_tl(cpu_cc_dst, 0);
6016
            break;
6017
        case 1:
6018
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6019
            tcg_gen_movi_tl(cpu_tmp0, 1);
6020
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6021
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6022
            break;
6023
        case 2:
6024
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6025
            tcg_gen_movi_tl(cpu_tmp0, 1);
6026
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6027
            tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
6028
            tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6029
            break;
6030
        default:
6031
        case 3:
6032
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
6033
            tcg_gen_movi_tl(cpu_tmp0, 1);
6034
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
6035
            tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
6036
            break;
6037
        }
6038
        s->cc_op = CC_OP_SARB + ot;
6039
        if (op != 0) {
6040
            if (mod != 3)
6041
                gen_op_st_T0_A0(ot + s->mem_index);
6042
            else
6043
                gen_op_mov_reg_T0(ot, rm);
6044
            tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
6045
            tcg_gen_movi_tl(cpu_cc_dst, 0);
6046
        }
6047
        break;
6048
    case 0x1bc: /* bsf */
6049
    case 0x1bd: /* bsr */
6050
        {
6051
            int label1;
6052
            TCGv t0;
6053

    
6054
            ot = dflag + OT_WORD;
6055
            modrm = ldub_code(s->pc++);
6056
            reg = ((modrm >> 3) & 7) | rex_r;
6057
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6058
            gen_extu(ot, cpu_T[0]);
6059
            label1 = gen_new_label();
6060
            tcg_gen_movi_tl(cpu_cc_dst, 0);
6061
            t0 = tcg_temp_local_new(TCG_TYPE_TL);
6062
            tcg_gen_mov_tl(t0, cpu_T[0]);
6063
            tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
6064
            if (b & 1) {
6065
                tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
6066
            } else {
6067
                tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
6068
            }
6069
            gen_op_mov_reg_T0(ot, reg);
6070
            tcg_gen_movi_tl(cpu_cc_dst, 1);
6071
            gen_set_label(label1);
6072
            tcg_gen_discard_tl(cpu_cc_src);
6073
            s->cc_op = CC_OP_LOGICB + ot;
6074
            tcg_temp_free(t0);
6075
        }
6076
        break;
6077
        /************************/
6078
        /* bcd */
6079
    case 0x27: /* daa */
6080
        if (CODE64(s))
6081
            goto illegal_op;
6082
        if (s->cc_op != CC_OP_DYNAMIC)
6083
            gen_op_set_cc_op(s->cc_op);
6084
        tcg_gen_helper_0_0(helper_daa);
6085
        s->cc_op = CC_OP_EFLAGS;
6086
        break;
6087
    case 0x2f: /* das */
6088
        if (CODE64(s))
6089
            goto illegal_op;
6090
        if (s->cc_op != CC_OP_DYNAMIC)
6091
            gen_op_set_cc_op(s->cc_op);
6092
        tcg_gen_helper_0_0(helper_das);
6093
        s->cc_op = CC_OP_EFLAGS;
6094
        break;
6095
    case 0x37: /* aaa */
6096
        if (CODE64(s))
6097
            goto illegal_op;
6098
        if (s->cc_op != CC_OP_DYNAMIC)
6099
            gen_op_set_cc_op(s->cc_op);
6100
        tcg_gen_helper_0_0(helper_aaa);
6101
        s->cc_op = CC_OP_EFLAGS;
6102
        break;
6103
    case 0x3f: /* aas */
6104
        if (CODE64(s))
6105
            goto illegal_op;
6106
        if (s->cc_op != CC_OP_DYNAMIC)
6107
            gen_op_set_cc_op(s->cc_op);
6108
        tcg_gen_helper_0_0(helper_aas);
6109
        s->cc_op = CC_OP_EFLAGS;
6110
        break;
6111
    case 0xd4: /* aam */
6112
        if (CODE64(s))
6113
            goto illegal_op;
6114
        val = ldub_code(s->pc++);
6115
        if (val == 0) {
6116
            gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
6117
        } else {
6118
            tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
6119
            s->cc_op = CC_OP_LOGICB;
6120
        }
6121
        break;
6122
    case 0xd5: /* aad */
6123
        if (CODE64(s))
6124
            goto illegal_op;
6125
        val = ldub_code(s->pc++);
6126
        tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
6127
        s->cc_op = CC_OP_LOGICB;
6128
        break;
6129
        /************************/
6130
        /* misc */
6131
    case 0x90: /* nop */
6132
        /* XXX: xchg + rex handling */
6133
        /* XXX: correct lock test for all insn */
6134
        if (prefixes & PREFIX_LOCK)
6135
            goto illegal_op;
6136
        if (prefixes & PREFIX_REPZ) {
6137
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
6138
        }
6139
        break;
6140
    case 0x9b: /* fwait */
6141
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
6142
            (HF_MP_MASK | HF_TS_MASK)) {
6143
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6144
        } else {
6145
            if (s->cc_op != CC_OP_DYNAMIC)
6146
                gen_op_set_cc_op(s->cc_op);
6147
            gen_jmp_im(pc_start - s->cs_base);
6148
            tcg_gen_helper_0_0(helper_fwait);
6149
        }
6150
        break;
6151
    case 0xcc: /* int3 */
6152
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
6153
        break;
6154
    case 0xcd: /* int N */
6155
        val = ldub_code(s->pc++);
6156
        if (s->vm86 && s->iopl != 3) {
6157
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6158
        } else {
6159
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
6160
        }
6161
        break;
6162
    case 0xce: /* into */
6163
        if (CODE64(s))
6164
            goto illegal_op;
6165
        if (s->cc_op != CC_OP_DYNAMIC)
6166
            gen_op_set_cc_op(s->cc_op);
6167
        gen_jmp_im(pc_start - s->cs_base);
6168
        tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
6169
        break;
6170
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
6171
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
6172
#if 1
6173
        gen_debug(s, pc_start - s->cs_base);
6174
#else
6175
        /* start debug */
6176
        tb_flush(cpu_single_env);
6177
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
6178
#endif
6179
        break;
6180
    case 0xfa: /* cli */
6181
        if (!s->vm86) {
6182
            if (s->cpl <= s->iopl) {
6183
                tcg_gen_helper_0_0(helper_cli);
6184
            } else {
6185
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6186
            }
6187
        } else {
6188
            if (s->iopl == 3) {
6189
                tcg_gen_helper_0_0(helper_cli);
6190
            } else {
6191
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6192
            }
6193
        }
6194
        break;
6195
    case 0xfb: /* sti */
6196
        if (!s->vm86) {
6197
            if (s->cpl <= s->iopl) {
6198
            gen_sti:
6199
                tcg_gen_helper_0_0(helper_sti);
6200
                /* interruptions are enabled only the first insn after sti */
6201
                /* If several instructions disable interrupts, only the
6202
                   _first_ does it */
6203
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
6204
                    tcg_gen_helper_0_0(helper_set_inhibit_irq);
6205
                /* give a chance to handle pending irqs */
6206
                gen_jmp_im(s->pc - s->cs_base);
6207
                gen_eob(s);
6208
            } else {
6209
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6210
            }
6211
        } else {
6212
            if (s->iopl == 3) {
6213
                goto gen_sti;
6214
            } else {
6215
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6216
            }
6217
        }
6218
        break;
6219
    case 0x62: /* bound */
6220
        if (CODE64(s))
6221
            goto illegal_op;
6222
        ot = dflag ? OT_LONG : OT_WORD;
6223
        modrm = ldub_code(s->pc++);
6224
        reg = (modrm >> 3) & 7;
6225
        mod = (modrm >> 6) & 3;
6226
        if (mod == 3)
6227
            goto illegal_op;
6228
        gen_op_mov_TN_reg(ot, 0, reg);
6229
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6230
        gen_jmp_im(pc_start - s->cs_base);
6231
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6232
        if (ot == OT_WORD)
6233
            tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
6234
        else
6235
            tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
6236
        break;
6237
    case 0x1c8 ... 0x1cf: /* bswap reg */
6238
        reg = (b & 7) | REX_B(s);
6239
#ifdef TARGET_X86_64
6240
        if (dflag == 2) {
6241
            gen_op_mov_TN_reg(OT_QUAD, 0, reg);
6242
            tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
6243
            gen_op_mov_reg_T0(OT_QUAD, reg);
6244
        } else
6245
        {
6246
            TCGv tmp0;
6247
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
6248
            
6249
            tmp0 = tcg_temp_new(TCG_TYPE_I32);
6250
            tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
6251
            tcg_gen_bswap_i32(tmp0, tmp0);
6252
            tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
6253
            gen_op_mov_reg_T0(OT_LONG, reg);
6254
        }
6255
#else
6256
        {
6257
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
6258
            tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
6259
            gen_op_mov_reg_T0(OT_LONG, reg);
6260
        }
6261
#endif
6262
        break;
6263
    case 0xd6: /* salc */
6264
        if (CODE64(s))
6265
            goto illegal_op;
6266
        if (s->cc_op != CC_OP_DYNAMIC)
6267
            gen_op_set_cc_op(s->cc_op);
6268
        gen_compute_eflags_c(cpu_T[0]);
6269
        tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
6270
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
6271
        break;
6272
    case 0xe0: /* loopnz */
6273
    case 0xe1: /* loopz */
6274
    case 0xe2: /* loop */
6275
    case 0xe3: /* jecxz */
6276
        {
6277
            int l1, l2, l3;
6278

    
6279
            tval = (int8_t)insn_get(s, OT_BYTE);
6280
            next_eip = s->pc - s->cs_base;
6281
            tval += next_eip;
6282
            if (s->dflag == 0)
6283
                tval &= 0xffff;
6284

    
6285
            l1 = gen_new_label();
6286
            l2 = gen_new_label();
6287
            l3 = gen_new_label();
6288
            b &= 3;
6289
            switch(b) {
6290
            case 0: /* loopnz */
6291
            case 1: /* loopz */
6292
                if (s->cc_op != CC_OP_DYNAMIC)
6293
                    gen_op_set_cc_op(s->cc_op);
6294
                gen_op_add_reg_im(s->aflag, R_ECX, -1);
6295
                gen_op_jz_ecx(s->aflag, l3);
6296
                gen_compute_eflags(cpu_tmp0);
6297
                tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
6298
                if (b == 0) {
6299
                    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
6300
                } else {
6301
                    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
6302
                }
6303
                break;
6304
            case 2: /* loop */
6305
                gen_op_add_reg_im(s->aflag, R_ECX, -1);
6306
                gen_op_jnz_ecx(s->aflag, l1);
6307
                break;
6308
            default:
6309
            case 3: /* jcxz */
6310
                gen_op_jz_ecx(s->aflag, l1);
6311
                break;
6312
            }
6313

    
6314
            gen_set_label(l3);
6315
            gen_jmp_im(next_eip);
6316
            tcg_gen_br(l2);
6317

    
6318
            gen_set_label(l1);
6319
            gen_jmp_im(tval);
6320
            gen_set_label(l2);
6321
            gen_eob(s);
6322
        }
6323
        break;
6324
    case 0x130: /* wrmsr */
6325
    case 0x132: /* rdmsr */
6326
        if (s->cpl != 0) {
6327
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6328
        } else {
6329
            if (s->cc_op != CC_OP_DYNAMIC)
6330
                gen_op_set_cc_op(s->cc_op);
6331
            gen_jmp_im(pc_start - s->cs_base);
6332
            if (b & 2) {
6333
                tcg_gen_helper_0_0(helper_rdmsr);
6334
            } else {
6335
                tcg_gen_helper_0_0(helper_wrmsr);
6336
            }
6337
        }
6338
        break;
6339
    case 0x131: /* rdtsc */
6340
        if (s->cc_op != CC_OP_DYNAMIC)
6341
            gen_op_set_cc_op(s->cc_op);
6342
        gen_jmp_im(pc_start - s->cs_base);
6343
        tcg_gen_helper_0_0(helper_rdtsc);
6344
        break;
6345
    case 0x133: /* rdpmc */
6346
        if (s->cc_op != CC_OP_DYNAMIC)
6347
            gen_op_set_cc_op(s->cc_op);
6348
        gen_jmp_im(pc_start - s->cs_base);
6349
        tcg_gen_helper_0_0(helper_rdpmc);
6350
        break;
6351
    case 0x134: /* sysenter */
6352
        if (CODE64(s))
6353
            goto illegal_op;
6354
        if (!s->pe) {
6355
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6356
        } else {
6357
            if (s->cc_op != CC_OP_DYNAMIC) {
6358
                gen_op_set_cc_op(s->cc_op);
6359
                s->cc_op = CC_OP_DYNAMIC;
6360
            }
6361
            gen_jmp_im(pc_start - s->cs_base);
6362
            tcg_gen_helper_0_0(helper_sysenter);
6363
            gen_eob(s);
6364
        }
6365
        break;
6366
    case 0x135: /* sysexit */
6367
        if (CODE64(s))
6368
            goto illegal_op;
6369
        if (!s->pe) {
6370
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6371
        } else {
6372
            if (s->cc_op != CC_OP_DYNAMIC) {
6373
                gen_op_set_cc_op(s->cc_op);
6374
                s->cc_op = CC_OP_DYNAMIC;
6375
            }
6376
            gen_jmp_im(pc_start - s->cs_base);
6377
            tcg_gen_helper_0_0(helper_sysexit);
6378
            gen_eob(s);
6379
        }
6380
        break;
6381
#ifdef TARGET_X86_64
6382
    case 0x105: /* syscall */
6383
        /* XXX: is it usable in real mode ? */
6384
        if (s->cc_op != CC_OP_DYNAMIC) {
6385
            gen_op_set_cc_op(s->cc_op);
6386
            s->cc_op = CC_OP_DYNAMIC;
6387
        }
6388
        gen_jmp_im(pc_start - s->cs_base);
6389
        tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
6390
        gen_eob(s);
6391
        break;
6392
    case 0x107: /* sysret */
6393
        if (!s->pe) {
6394
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6395
        } else {
6396
            if (s->cc_op != CC_OP_DYNAMIC) {
6397
                gen_op_set_cc_op(s->cc_op);
6398
                s->cc_op = CC_OP_DYNAMIC;
6399
            }
6400
            gen_jmp_im(pc_start - s->cs_base);
6401
            tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
6402
            /* condition codes are modified only in long mode */
6403
            if (s->lma)
6404
                s->cc_op = CC_OP_EFLAGS;
6405
            gen_eob(s);
6406
        }
6407
        break;
6408
#endif
6409
    case 0x1a2: /* cpuid */
6410
        tcg_gen_helper_0_0(helper_cpuid);
6411
        break;
6412
    case 0xf4: /* hlt */
6413
        if (s->cpl != 0) {
6414
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6415
        } else {
6416
            if (s->cc_op != CC_OP_DYNAMIC)
6417
                gen_op_set_cc_op(s->cc_op);
6418
            gen_jmp_im(s->pc - s->cs_base);
6419
            tcg_gen_helper_0_0(helper_hlt);
6420
            s->is_jmp = 3;
6421
        }
6422
        break;
6423
    case 0x100:
6424
        modrm = ldub_code(s->pc++);
6425
        mod = (modrm >> 6) & 3;
6426
        op = (modrm >> 3) & 7;
6427
        switch(op) {
6428
        case 0: /* sldt */
6429
            if (!s->pe || s->vm86)
6430
                goto illegal_op;
6431
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
6432
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
6433
            ot = OT_WORD;
6434
            if (mod == 3)
6435
                ot += s->dflag;
6436
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6437
            break;
6438
        case 2: /* lldt */
6439
            if (!s->pe || s->vm86)
6440
                goto illegal_op;
6441
            if (s->cpl != 0) {
6442
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6443
            } else {
6444
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
6445
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6446
                gen_jmp_im(pc_start - s->cs_base);
6447
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6448
                tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
6449
            }
6450
            break;
6451
        case 1: /* str */
6452
            if (!s->pe || s->vm86)
6453
                goto illegal_op;
6454
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
6455
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
6456
            ot = OT_WORD;
6457
            if (mod == 3)
6458
                ot += s->dflag;
6459
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6460
            break;
6461
        case 3: /* ltr */
6462
            if (!s->pe || s->vm86)
6463
                goto illegal_op;
6464
            if (s->cpl != 0) {
6465
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6466
            } else {
6467
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
6468
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6469
                gen_jmp_im(pc_start - s->cs_base);
6470
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6471
                tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
6472
            }
6473
            break;
6474
        case 4: /* verr */
6475
        case 5: /* verw */
6476
            if (!s->pe || s->vm86)
6477
                goto illegal_op;
6478
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6479
            if (s->cc_op != CC_OP_DYNAMIC)
6480
                gen_op_set_cc_op(s->cc_op);
6481
            if (op == 4)
6482
                tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
6483
            else
6484
                tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
6485
            s->cc_op = CC_OP_EFLAGS;
6486
            break;
6487
        default:
6488
            goto illegal_op;
6489
        }
6490
        break;
6491
    case 0x101:
6492
        modrm = ldub_code(s->pc++);
6493
        mod = (modrm >> 6) & 3;
6494
        op = (modrm >> 3) & 7;
6495
        rm = modrm & 7;
6496
        switch(op) {
6497
        case 0: /* sgdt */
6498
            if (mod == 3)
6499
                goto illegal_op;
6500
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
6501
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6502
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
6503
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
6504
            gen_add_A0_im(s, 2);
6505
            tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
6506
            if (!s->dflag)
6507
                gen_op_andl_T0_im(0xffffff);
6508
            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6509
            break;
6510
        case 1:
6511
            if (mod == 3) {
6512
                switch (rm) {
6513
                case 0: /* monitor */
6514
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6515
                        s->cpl != 0)
6516
                        goto illegal_op;
6517
                    gen_jmp_im(pc_start - s->cs_base);
6518
#ifdef TARGET_X86_64
6519
                    if (s->aflag == 2) {
6520
                        gen_op_movq_A0_reg(R_EAX);
6521
                    } else
6522
#endif
6523
                    {
6524
                        gen_op_movl_A0_reg(R_EAX);
6525
                        if (s->aflag == 0)
6526
                            gen_op_andl_A0_ffff();
6527
                    }
6528
                    gen_add_A0_ds_seg(s);
6529
                    tcg_gen_helper_0_1(helper_monitor, cpu_A0);
6530
                    break;
6531
                case 1: /* mwait */
6532
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6533
                        s->cpl != 0)
6534
                        goto illegal_op;
6535
                    if (s->cc_op != CC_OP_DYNAMIC) {
6536
                        gen_op_set_cc_op(s->cc_op);
6537
                        s->cc_op = CC_OP_DYNAMIC;
6538
                    }
6539
                    gen_jmp_im(s->pc - s->cs_base);
6540
                    tcg_gen_helper_0_0(helper_mwait);
6541
                    gen_eob(s);
6542
                    break;
6543
                default:
6544
                    goto illegal_op;
6545
                }
6546
            } else { /* sidt */
6547
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
6548
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6549
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
6550
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
6551
                gen_add_A0_im(s, 2);
6552
                tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
6553
                if (!s->dflag)
6554
                    gen_op_andl_T0_im(0xffffff);
6555
                gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6556
            }
6557
            break;
6558
        case 2: /* lgdt */
6559
        case 3: /* lidt */
6560
            if (mod == 3) {
6561
                if (s->cc_op != CC_OP_DYNAMIC)
6562
                    gen_op_set_cc_op(s->cc_op);
6563
                gen_jmp_im(pc_start - s->cs_base);
6564
                switch(rm) {
6565
                case 0: /* VMRUN */
6566
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
6567
                        goto illegal_op;
6568
                    if (s->cpl != 0) {
6569
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6570
                        break;
6571
                    } else {
6572
                        tcg_gen_helper_0_1(helper_vmrun, 
6573
                                           tcg_const_i32(s->aflag));
6574
                        s->cc_op = CC_OP_EFLAGS;
6575
                        gen_eob(s);
6576
                    }
6577
                    break;
6578
                case 1: /* VMMCALL */
6579
                    if (!(s->flags & HF_SVME_MASK))
6580
                        goto illegal_op;
6581
                    tcg_gen_helper_0_0(helper_vmmcall);
6582
                    break;
6583
                case 2: /* VMLOAD */
6584
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
6585
                        goto illegal_op;
6586
                    if (s->cpl != 0) {
6587
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6588
                        break;
6589
                    } else {
6590
                        tcg_gen_helper_0_1(helper_vmload,
6591
                                           tcg_const_i32(s->aflag));
6592
                    }
6593
                    break;
6594
                case 3: /* VMSAVE */
6595
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
6596
                        goto illegal_op;
6597
                    if (s->cpl != 0) {
6598
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6599
                        break;
6600
                    } else {
6601
                        tcg_gen_helper_0_1(helper_vmsave,
6602
                                           tcg_const_i32(s->aflag));
6603
                    }
6604
                    break;
6605
                case 4: /* STGI */
6606
                    if ((!(s->flags & HF_SVME_MASK) &&
6607
                         !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) || 
6608
                        !s->pe)
6609
                        goto illegal_op;
6610
                    if (s->cpl != 0) {
6611
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6612
                        break;
6613
                    } else {
6614
                        tcg_gen_helper_0_0(helper_stgi);
6615
                    }
6616
                    break;
6617
                case 5: /* CLGI */
6618
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
6619
                        goto illegal_op;
6620
                    if (s->cpl != 0) {
6621
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6622
                        break;
6623
                    } else {
6624
                        tcg_gen_helper_0_0(helper_clgi);
6625
                    }
6626
                    break;
6627
                case 6: /* SKINIT */
6628
                    if ((!(s->flags & HF_SVME_MASK) && 
6629
                         !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) || 
6630
                        !s->pe)
6631
                        goto illegal_op;
6632
                    tcg_gen_helper_0_0(helper_skinit);
6633
                    break;
6634
                case 7: /* INVLPGA */
6635
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
6636
                        goto illegal_op;
6637
                    if (s->cpl != 0) {
6638
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6639
                        break;
6640
                    } else {
6641
                        tcg_gen_helper_0_1(helper_invlpga,
6642
                                           tcg_const_i32(s->aflag));
6643
                    }
6644
                    break;
6645
                default:
6646
                    goto illegal_op;
6647
                }
6648
            } else if (s->cpl != 0) {
6649
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6650
            } else {
6651
                gen_svm_check_intercept(s, pc_start,
6652
                                        op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
6653
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6654
                gen_op_ld_T1_A0(OT_WORD + s->mem_index);
6655
                gen_add_A0_im(s, 2);
6656
                gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6657
                if (!s->dflag)
6658
                    gen_op_andl_T0_im(0xffffff);
6659
                if (op == 2) {
6660
                    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
6661
                    tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
6662
                } else {
6663
                    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
6664
                    tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
6665
                }
6666
            }
6667
            break;
6668
        case 4: /* smsw */
6669
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
6670
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
6671
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6672
            break;
6673
        case 6: /* lmsw */
6674
            if (s->cpl != 0) {
6675
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6676
            } else {
6677
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6678
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6679
                tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
6680
                gen_jmp_im(s->pc - s->cs_base);
6681
                gen_eob(s);
6682
            }
6683
            break;
6684
        case 7: /* invlpg */
6685
            if (s->cpl != 0) {
6686
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6687
            } else {
6688
                if (mod == 3) {
6689
#ifdef TARGET_X86_64
6690
                    if (CODE64(s) && rm == 0) {
6691
                        /* swapgs */
6692
                        tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
6693
                        tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
6694
                        tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
6695
                        tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
6696
                    } else
6697
#endif
6698
                    {
6699
                        goto illegal_op;
6700
                    }
6701
                } else {
6702
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6703
                    tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
6704
                    gen_jmp_im(s->pc - s->cs_base);
6705
                    gen_eob(s);
6706
                }
6707
            }
6708
            break;
6709
        default:
6710
            goto illegal_op;
6711
        }
6712
        break;
6713
    case 0x108: /* invd */
6714
    case 0x109: /* wbinvd */
6715
        if (s->cpl != 0) {
6716
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6717
        } else {
6718
            gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
6719
            /* nothing to do */
6720
        }
6721
        break;
6722
    case 0x63: /* arpl or movslS (x86_64) */
6723
#ifdef TARGET_X86_64
6724
        if (CODE64(s)) {
6725
            int d_ot;
6726
            /* d_ot is the size of destination */
6727
            d_ot = dflag + OT_WORD;
6728

    
6729
            modrm = ldub_code(s->pc++);
6730
            reg = ((modrm >> 3) & 7) | rex_r;
6731
            mod = (modrm >> 6) & 3;
6732
            rm = (modrm & 7) | REX_B(s);
6733

    
6734
            if (mod == 3) {
6735
                gen_op_mov_TN_reg(OT_LONG, 0, rm);
6736
                /* sign extend */
6737
                if (d_ot == OT_QUAD)
6738
                    tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
6739
                gen_op_mov_reg_T0(d_ot, reg);
6740
            } else {
6741
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6742
                if (d_ot == OT_QUAD) {
6743
                    gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6744
                } else {
6745
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6746
                }
6747
                gen_op_mov_reg_T0(d_ot, reg);
6748
            }
6749
        } else
6750
#endif
6751
        {
6752
            int label1;
6753
            TCGv t0, t1, t2;
6754

    
6755
            if (!s->pe || s->vm86)
6756
                goto illegal_op;
6757
            t0 = tcg_temp_local_new(TCG_TYPE_TL);
6758
            t1 = tcg_temp_local_new(TCG_TYPE_TL);
6759
            t2 = tcg_temp_local_new(TCG_TYPE_TL);
6760
            ot = OT_WORD;
6761
            modrm = ldub_code(s->pc++);
6762
            reg = (modrm >> 3) & 7;
6763
            mod = (modrm >> 6) & 3;
6764
            rm = modrm & 7;
6765
            if (mod != 3) {
6766
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6767
                gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
6768
            } else {
6769
                gen_op_mov_v_reg(ot, t0, rm);
6770
            }
6771
            gen_op_mov_v_reg(ot, t1, reg);
6772
            tcg_gen_andi_tl(cpu_tmp0, t0, 3);
6773
            tcg_gen_andi_tl(t1, t1, 3);
6774
            tcg_gen_movi_tl(t2, 0);
6775
            label1 = gen_new_label();
6776
            tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
6777
            tcg_gen_andi_tl(t0, t0, ~3);
6778
            tcg_gen_or_tl(t0, t0, t1);
6779
            tcg_gen_movi_tl(t2, CC_Z);
6780
            gen_set_label(label1);
6781
            if (mod != 3) {
6782
                gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
6783
            } else {
6784
                gen_op_mov_reg_v(ot, rm, t0);
6785
            }
6786
            if (s->cc_op != CC_OP_DYNAMIC)
6787
                gen_op_set_cc_op(s->cc_op);
6788
            gen_compute_eflags(cpu_cc_src);
6789
            tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
6790
            tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
6791
            s->cc_op = CC_OP_EFLAGS;
6792
            tcg_temp_free(t0);
6793
            tcg_temp_free(t1);
6794
            tcg_temp_free(t2);
6795
        }
6796
        break;
6797
    case 0x102: /* lar */
6798
    case 0x103: /* lsl */
6799
        {
6800
            int label1;
6801
            TCGv t0;
6802
            if (!s->pe || s->vm86)
6803
                goto illegal_op;
6804
            ot = dflag ? OT_LONG : OT_WORD;
6805
            modrm = ldub_code(s->pc++);
6806
            reg = ((modrm >> 3) & 7) | rex_r;
6807
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6808
            t0 = tcg_temp_local_new(TCG_TYPE_TL);
6809
            if (s->cc_op != CC_OP_DYNAMIC)
6810
                gen_op_set_cc_op(s->cc_op);
6811
            if (b == 0x102)
6812
                tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
6813
            else
6814
                tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
6815
            tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
6816
            label1 = gen_new_label();
6817
            tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
6818
            gen_op_mov_reg_v(ot, reg, t0);
6819
            gen_set_label(label1);
6820
            s->cc_op = CC_OP_EFLAGS;
6821
            tcg_temp_free(t0);
6822
        }
6823
        break;
6824
    case 0x118:
6825
        modrm = ldub_code(s->pc++);
6826
        mod = (modrm >> 6) & 3;
6827
        op = (modrm >> 3) & 7;
6828
        switch(op) {
6829
        case 0: /* prefetchnta */
6830
        case 1: /* prefetchnt0 */
6831
        case 2: /* prefetchnt0 */
6832
        case 3: /* prefetchnt0 */
6833
            if (mod == 3)
6834
                goto illegal_op;
6835
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6836
            /* nothing more to do */
6837
            break;
6838
        default: /* nop (multi byte) */
6839
            gen_nop_modrm(s, modrm);
6840
            break;
6841
        }
6842
        break;
6843
    case 0x119 ... 0x11f: /* nop (multi byte) */
6844
        modrm = ldub_code(s->pc++);
6845
        gen_nop_modrm(s, modrm);
6846
        break;
6847
    case 0x120: /* mov reg, crN */
6848
    case 0x122: /* mov crN, reg */
6849
        if (s->cpl != 0) {
6850
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6851
        } else {
6852
            modrm = ldub_code(s->pc++);
6853
            if ((modrm & 0xc0) != 0xc0)
6854
                goto illegal_op;
6855
            rm = (modrm & 7) | REX_B(s);
6856
            reg = ((modrm >> 3) & 7) | rex_r;
6857
            if (CODE64(s))
6858
                ot = OT_QUAD;
6859
            else
6860
                ot = OT_LONG;
6861
            switch(reg) {
6862
            case 0:
6863
            case 2:
6864
            case 3:
6865
            case 4:
6866
            case 8:
6867
                if (s->cc_op != CC_OP_DYNAMIC)
6868
                    gen_op_set_cc_op(s->cc_op);
6869
                gen_jmp_im(pc_start - s->cs_base);
6870
                if (b & 2) {
6871
                    gen_op_mov_TN_reg(ot, 0, rm);
6872
                    tcg_gen_helper_0_2(helper_write_crN, 
6873
                                       tcg_const_i32(reg), cpu_T[0]);
6874
                    gen_jmp_im(s->pc - s->cs_base);
6875
                    gen_eob(s);
6876
                } else {
6877
                    tcg_gen_helper_1_1(helper_read_crN, 
6878
                                       cpu_T[0], tcg_const_i32(reg));
6879
                    gen_op_mov_reg_T0(ot, rm);
6880
                }
6881
                break;
6882
            default:
6883
                goto illegal_op;
6884
            }
6885
        }
6886
        break;
6887
    case 0x121: /* mov reg, drN */
6888
    case 0x123: /* mov drN, reg */
6889
        if (s->cpl != 0) {
6890
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6891
        } else {
6892
            modrm = ldub_code(s->pc++);
6893
            if ((modrm & 0xc0) != 0xc0)
6894
                goto illegal_op;
6895
            rm = (modrm & 7) | REX_B(s);
6896
            reg = ((modrm >> 3) & 7) | rex_r;
6897
            if (CODE64(s))
6898
                ot = OT_QUAD;
6899
            else
6900
                ot = OT_LONG;
6901
            /* XXX: do it dynamically with CR4.DE bit */
6902
            if (reg == 4 || reg == 5 || reg >= 8)
6903
                goto illegal_op;
6904
            if (b & 2) {
6905
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6906
                gen_op_mov_TN_reg(ot, 0, rm);
6907
                tcg_gen_helper_0_2(helper_movl_drN_T0,
6908
                                   tcg_const_i32(reg), cpu_T[0]);
6909
                gen_jmp_im(s->pc - s->cs_base);
6910
                gen_eob(s);
6911
            } else {
6912
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6913
                tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
6914
                gen_op_mov_reg_T0(ot, rm);
6915
            }
6916
        }
6917
        break;
6918
    case 0x106: /* clts */
6919
        if (s->cpl != 0) {
6920
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6921
        } else {
6922
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6923
            tcg_gen_helper_0_0(helper_clts);
6924
            /* abort block because static cpu state changed */
6925
            gen_jmp_im(s->pc - s->cs_base);
6926
            gen_eob(s);
6927
        }
6928
        break;
6929
    /* MMX/3DNow!/SSE/SSE2/SSE3 support */
6930
    case 0x1c3: /* MOVNTI reg, mem */
6931
        if (!(s->cpuid_features & CPUID_SSE2))
6932
            goto illegal_op;
6933
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6934
        modrm = ldub_code(s->pc++);
6935
        mod = (modrm >> 6) & 3;
6936
        if (mod == 3)
6937
            goto illegal_op;
6938
        reg = ((modrm >> 3) & 7) | rex_r;
6939
        /* generate a generic store */
6940
        gen_ldst_modrm(s, modrm, ot, reg, 1);
6941
        break;
6942
    case 0x1ae:
6943
        modrm = ldub_code(s->pc++);
6944
        mod = (modrm >> 6) & 3;
6945
        op = (modrm >> 3) & 7;
6946
        switch(op) {
6947
        case 0: /* fxsave */
6948
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6949
                (s->flags & HF_EM_MASK))
6950
                goto illegal_op;
6951
            if (s->flags & HF_TS_MASK) {
6952
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6953
                break;
6954
            }
6955
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6956
            if (s->cc_op != CC_OP_DYNAMIC)
6957
                gen_op_set_cc_op(s->cc_op);
6958
            gen_jmp_im(pc_start - s->cs_base);
6959
            tcg_gen_helper_0_2(helper_fxsave, 
6960
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
6961
            break;
6962
        case 1: /* fxrstor */
6963
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6964
                (s->flags & HF_EM_MASK))
6965
                goto illegal_op;
6966
            if (s->flags & HF_TS_MASK) {
6967
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6968
                break;
6969
            }
6970
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6971
            if (s->cc_op != CC_OP_DYNAMIC)
6972
                gen_op_set_cc_op(s->cc_op);
6973
            gen_jmp_im(pc_start - s->cs_base);
6974
            tcg_gen_helper_0_2(helper_fxrstor,
6975
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
6976
            break;
6977
        case 2: /* ldmxcsr */
6978
        case 3: /* stmxcsr */
6979
            if (s->flags & HF_TS_MASK) {
6980
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6981
                break;
6982
            }
6983
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6984
                mod == 3)
6985
                goto illegal_op;
6986
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6987
            if (op == 2) {
6988
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6989
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
6990
            } else {
6991
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
6992
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
6993
            }
6994
            break;
6995
        case 5: /* lfence */
6996
        case 6: /* mfence */
6997
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6998
                goto illegal_op;
6999
            break;
7000
        case 7: /* sfence / clflush */
7001
            if ((modrm & 0xc7) == 0xc0) {
7002
                /* sfence */
7003
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
7004
                if (!(s->cpuid_features & CPUID_SSE))
7005
                    goto illegal_op;
7006
            } else {
7007
                /* clflush */
7008
                if (!(s->cpuid_features & CPUID_CLFLUSH))
7009
                    goto illegal_op;
7010
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7011
            }
7012
            break;
7013
        default:
7014
            goto illegal_op;
7015
        }
7016
        break;
7017
    case 0x10d: /* 3DNow! prefetch(w) */
7018
        modrm = ldub_code(s->pc++);
7019
        mod = (modrm >> 6) & 3;
7020
        if (mod == 3)
7021
            goto illegal_op;
7022
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7023
        /* ignore for now */
7024
        break;
7025
    case 0x1aa: /* rsm */
7026
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
7027
        if (!(s->flags & HF_SMM_MASK))
7028
            goto illegal_op;
7029
        if (s->cc_op != CC_OP_DYNAMIC) {
7030
            gen_op_set_cc_op(s->cc_op);
7031
            s->cc_op = CC_OP_DYNAMIC;
7032
        }
7033
        gen_jmp_im(s->pc - s->cs_base);
7034
        tcg_gen_helper_0_0(helper_rsm);
7035
        gen_eob(s);
7036
        break;
7037
    case 0x10e ... 0x10f:
7038
        /* 3DNow! instructions, ignore prefixes */
7039
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
7040
    case 0x110 ... 0x117:
7041
    case 0x128 ... 0x12f:
7042
    case 0x150 ... 0x177:
7043
    case 0x17c ... 0x17f:
7044
    case 0x1c2:
7045
    case 0x1c4 ... 0x1c6:
7046
    case 0x1d0 ... 0x1fe:
7047
        gen_sse(s, b, pc_start, rex_r);
7048
        break;
7049
    default:
7050
        goto illegal_op;
7051
    }
7052
    /* lock generation */
7053
    if (s->prefix & PREFIX_LOCK)
7054
        tcg_gen_helper_0_0(helper_unlock);
7055
    return s->pc;
7056
 illegal_op:
7057
    if (s->prefix & PREFIX_LOCK)
7058
        tcg_gen_helper_0_0(helper_unlock);
7059
    /* XXX: ensure that no lock was generated */
7060
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
7061
    return s->pc;
7062
}
7063

    
7064
/* One-time initialization of the TCG globals used by the x86 translator:
   the env base pointer and the lazily-evaluated condition-code state, plus
   registration of every helper function declared in helper.h. */
void optimize_flags_init(void)
{
    /* Sanity check: CCTable entries must have the expected power-of-two
       size for the host register width (presumably so the table can be
       indexed cheaply — TODO confirm against the CCTable definition). */
#if TCG_TARGET_REG_BITS == 32
    assert(sizeof(CCTable) == (1 << 3));
#else
    assert(sizeof(CCTable) == (1 << 4));
#endif
    /* Fixed TCG globals: 'env' lives in host register AREG0; the lazy
       flag-computation state (cc_op/cc_src/cc_dst/cc_tmp) is mapped onto
       the corresponding CPUState fields. */
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
    cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
                                   TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
    cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
    cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
    cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");

    /* register helpers */
    /* Re-including helper.h with DEF_HELPER redefined turns each helper
       declaration into a tcg_register_helper() call (name + symbol). */
#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
#include "helper.h"
}
7086

    
7087
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction (used to reconstruct the
   guest CPU state at an arbitrary point inside the block).  Returns 0. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;        /* guest PC of the next insn to decode */
    uint16_t *gen_opc_end;      /* hard limit of the opcode buffer */
    int j, lj, cflags;
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* Unpack the static CPU state captured in tb->flags into the
       per-translation context; these bits are constant for the whole TB. */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;          /* protected mode */
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;    /* 32-bit code segment */
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;      /* 32-bit stack segment */
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;  /* segment bases may be non-zero */
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;           /* virtual-8086 mode */
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;             /* trap flag (single-step) */
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;  /* flags state unknown at TB entry */
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    /* mem_index is added to the OT_* size code to pick the load/store
       variant; under softmmu it also selects the CPL-dependent MMU mode. */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
    dc->cpuid_ext3_features = env->cpuid_ext3_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;        /* long mode active */
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;    /* 64-bit code segment */
#endif
    dc->flags = flags;
    /* direct jump optimization is only safe when nothing (single-step,
       inhibited IRQs) can require stopping after each instruction */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    /* allocate the TCG temporaries shared by the gen_* helper routines */
    cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
    cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
    cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_T3 = tcg_temp_new(TCG_TYPE_TL);

    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
    cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
    cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
    cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;  /* last gen_opc_* slot filled in (search_pc mode only) */

    /* main translation loop: one guest instruction per iteration */
    for(;;) {
        /* emit a debug exception if a breakpoint is set at this PC */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* record guest PC and cc_op for every opcode slot so the CPU
               state can be reconstructed at any intermediate op; slots
               between instruction starts are zero-filled */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
    }
#endif

    /* tb->size is only meaningful for a full (non-search_pc) translation */
    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
7248

    
7249
/* Public entry point: translate basic block 'tb' to TCG ops without
   recording per-instruction PC information. */
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    const int want_pc_info = 0;

    return gen_intermediate_code_internal(env, tb, want_pc_info);
}
7253

    
7254
/* Public entry point: translate basic block 'tb' while also recording
   per-instruction PC information (for state restore after a fault). */
int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    const int want_pc_info = 1;

    return gen_intermediate_code_internal(env, tb, want_pc_info);
}
7258

    
7259
/* Restore the guest CPU state (EIP and lazy condition-code op) from the
   per-op records made during a search_pc translation; 'pc_pos' is the
   index of the intermediate op where execution faulted. */
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    int restored_cc_op;

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP) {
        int idx;

        fprintf(logfile, "RESTORE:\n");
        for (idx = 0; idx <= pc_pos; idx++) {
            if (!gen_opc_instr_start[idx])
                continue;
            fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", idx, gen_opc_pc[idx]);
        }
        fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                (uint32_t)tb->cs_base);
    }
#endif
    /* Re-derive EIP from the recorded linear PC of the faulting insn. */
    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
    /* CC_OP_DYNAMIC means the runtime value of env->cc_op was already
       current at that point, so only overwrite it for static cc ops. */
    restored_cc_op = gen_opc_cc_op[pc_pos];
    if (restored_cc_op != CC_OP_DYNAMIC)
        env->cc_op = restored_cc_op;
}