/*
 *  i386 translation
 *
 *  Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
#include <stdarg.h>
21
#include <stdlib.h>
22
#include <stdio.h>
23
#include <string.h>
24
#include <inttypes.h>
25
#include <signal.h>
26
#include <assert.h>
27

    
28
#include "cpu.h"
29
#include "exec-all.h"
30
#include "disas.h"
31
#include "helper.h"
32
#include "tcg-op.h"
33

    
34
#define PREFIX_REPZ   0x01
35
#define PREFIX_REPNZ  0x02
36
#define PREFIX_LOCK   0x04
37
#define PREFIX_DATA   0x08
38
#define PREFIX_ADR    0x10
39

    
40
#ifdef TARGET_X86_64
41
#define X86_64_ONLY(x) x
42
#define X86_64_DEF(x...) x
43
#define CODE64(s) ((s)->code64)
44
#define REX_X(s) ((s)->rex_x)
45
#define REX_B(s) ((s)->rex_b)
46
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
47
#if 1
48
#define BUGGY_64(x) NULL
49
#endif
50
#else
51
#define X86_64_ONLY(x) NULL
52
#define X86_64_DEF(x...)
53
#define CODE64(s) 0
54
#define REX_X(s) 0
55
#define REX_B(s) 0
56
#endif
57

    
58
//#define MACRO_TEST   1
59

    
60
/* global register indexes */
61
static TCGv cpu_env, cpu_T[2], cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst;
62
static TCGv cpu_T3;
63
/* local register indexes (only used inside old micro ops) */
64
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
65
static TCGv cpu_tmp5, cpu_tmp6;
66

    
67
#ifdef TARGET_X86_64
68
static int x86_64_hregs;
69
#endif
70

    
71
typedef struct DisasContext {
72
    /* current insn context */
73
    int override; /* -1 if no override */
74
    int prefix;
75
    int aflag, dflag;
76
    target_ulong pc; /* pc = eip + cs_base */
77
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
78
                   static state change (stop translation) */
79
    /* current block context */
80
    target_ulong cs_base; /* base of CS segment */
81
    int pe;     /* protected mode */
82
    int code32; /* 32 bit code segment */
83
#ifdef TARGET_X86_64
84
    int lma;    /* long mode active */
85
    int code64; /* 64 bit code segment */
86
    int rex_x, rex_b;
87
#endif
88
    int ss32;   /* 32 bit stack segment */
89
    int cc_op;  /* current CC operation */
90
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
91
    int f_st;   /* currently unused */
92
    int vm86;   /* vm86 mode */
93
    int cpl;
94
    int iopl;
95
    int tf;     /* TF cpu flag */
96
    int singlestep_enabled; /* "hardware" single step enabled */
97
    int jmp_opt; /* use direct block chaining for direct jumps */
98
    int mem_index; /* select memory access functions */
99
    uint64_t flags; /* all execution flags */
100
    struct TranslationBlock *tb;
101
    int popl_esp_hack; /* for correct popl with esp base handling */
102
    int rip_offset; /* only used in x86_64, but left for simplicity */
103
    int cpuid_features;
104
    int cpuid_ext_features;
105
    int cpuid_ext2_features;
106
    int cpuid_ext3_features;
107
} DisasContext;
108

    
109
static void gen_eob(DisasContext *s);
110
static void gen_jmp(DisasContext *s, target_ulong eip);
111
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
112

    
113
/* i386 arith/logic operations */
114
enum {
115
    OP_ADDL,
116
    OP_ORL,
117
    OP_ADCL,
118
    OP_SBBL,
119
    OP_ANDL,
120
    OP_SUBL,
121
    OP_XORL,
122
    OP_CMPL,
123
};
124

    
125
/* i386 shift ops */
126
enum {
127
    OP_ROL,
128
    OP_ROR,
129
    OP_RCL,
130
    OP_RCR,
131
    OP_SHL,
132
    OP_SHR,
133
    OP_SHL1, /* undocumented */
134
    OP_SAR = 7,
135
};
136

    
137
enum {
138
    JCC_O,
139
    JCC_B,
140
    JCC_Z,
141
    JCC_BE,
142
    JCC_S,
143
    JCC_P,
144
    JCC_L,
145
    JCC_LE,
146
};
147

    
148
/* operand size */
149
enum {
150
    OT_BYTE = 0,
151
    OT_WORD,
152
    OT_LONG,
153
    OT_QUAD,
154
};
155

    
156
enum {
157
    /* I386 int registers */
158
    OR_EAX,   /* MUST be even numbered */
159
    OR_ECX,
160
    OR_EDX,
161
    OR_EBX,
162
    OR_ESP,
163
    OR_EBP,
164
    OR_ESI,
165
    OR_EDI,
166

    
167
    OR_TMP0 = 16,    /* temporary operand register */
168
    OR_TMP1,
169
    OR_A0, /* temporary register used when doing address evaluation */
170
};
171

    
172
static inline void gen_op_movl_T0_0(void)
173
{
174
    tcg_gen_movi_tl(cpu_T[0], 0);
175
}
176

    
177
static inline void gen_op_movl_T0_im(int32_t val)
178
{
179
    tcg_gen_movi_tl(cpu_T[0], val);
180
}
181

    
182
static inline void gen_op_movl_T0_imu(uint32_t val)
183
{
184
    tcg_gen_movi_tl(cpu_T[0], val);
185
}
186

    
187
static inline void gen_op_movl_T1_im(int32_t val)
188
{
189
    tcg_gen_movi_tl(cpu_T[1], val);
190
}
191

    
192
static inline void gen_op_movl_T1_imu(uint32_t val)
193
{
194
    tcg_gen_movi_tl(cpu_T[1], val);
195
}
196

    
197
static inline void gen_op_movl_A0_im(uint32_t val)
198
{
199
    tcg_gen_movi_tl(cpu_A0, val);
200
}
201

    
202
#ifdef TARGET_X86_64
203
static inline void gen_op_movq_A0_im(int64_t val)
204
{
205
    tcg_gen_movi_tl(cpu_A0, val);
206
}
207
#endif
208

    
209
static inline void gen_movtl_T0_im(target_ulong val)
210
{
211
    tcg_gen_movi_tl(cpu_T[0], val);
212
}
213

    
214
static inline void gen_movtl_T1_im(target_ulong val)
215
{
216
    tcg_gen_movi_tl(cpu_T[1], val);
217
}
218

    
219
static inline void gen_op_andl_T0_ffff(void)
220
{
221
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
222
}
223

    
224
static inline void gen_op_andl_T0_im(uint32_t val)
225
{
226
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
227
}
228

    
229
static inline void gen_op_movl_T0_T1(void)
230
{
231
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
232
}
233

    
234
static inline void gen_op_andl_A0_ffff(void)
235
{
236
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
237
}
238

    
239
#ifdef TARGET_X86_64
240

    
241
#define NB_OP_SIZES 4
242

    
243
#else /* !TARGET_X86_64 */
244

    
245
#define NB_OP_SIZES 3
246

    
247
#endif /* !TARGET_X86_64 */
248

    
249
#if defined(WORDS_BIGENDIAN)
250
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
251
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
252
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
253
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
254
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
255
#else
256
#define REG_B_OFFSET 0
257
#define REG_H_OFFSET 1
258
#define REG_W_OFFSET 0
259
#define REG_L_OFFSET 0
260
#define REG_LH_OFFSET 4
261
#endif
262

    
263
static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
264
{
265
    switch(ot) {
266
    case OT_BYTE:
267
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
268
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
269
        } else {
270
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
271
        }
272
        break;
273
    case OT_WORD:
274
        tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
275
        break;
276
#ifdef TARGET_X86_64
277
    case OT_LONG:
278
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
279
        /* high part of register set to zero */
280
        tcg_gen_movi_tl(cpu_tmp0, 0);
281
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
282
        break;
283
    default:
284
    case OT_QUAD:
285
        tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
286
        break;
287
#else
288
    default:
289
    case OT_LONG:
290
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
291
        break;
292
#endif
293
    }
294
}
295

    
296
static inline void gen_op_mov_reg_T0(int ot, int reg)
297
{
298
    gen_op_mov_reg_TN(ot, 0, reg);
299
}
300

    
301
static inline void gen_op_mov_reg_T1(int ot, int reg)
302
{
303
    gen_op_mov_reg_TN(ot, 1, reg);
304
}
305

    
306
static inline void gen_op_mov_reg_A0(int size, int reg)
307
{
308
    switch(size) {
309
    case 0:
310
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
311
        break;
312
#ifdef TARGET_X86_64
313
    case 1:
314
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
315
        /* high part of register set to zero */
316
        tcg_gen_movi_tl(cpu_tmp0, 0);
317
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
318
        break;
319
    default:
320
    case 2:
321
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
322
        break;
323
#else
324
    default:
325
    case 1:
326
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
327
        break;
328
#endif
329
    }
330
}
331

    
332
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
333
{
334
    switch(ot) {
335
    case OT_BYTE:
336
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
337
            goto std_case;
338
        } else {
339
            tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
340
        }
341
        break;
342
    default:
343
    std_case:
344
        tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
345
        break;
346
    }
347
}
348

    
349
static inline void gen_op_movl_A0_reg(int reg)
350
{
351
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
352
}
353

    
354
static inline void gen_op_addl_A0_im(int32_t val)
355
{
356
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
357
#ifdef TARGET_X86_64
358
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
359
#endif
360
}
361

    
362
#ifdef TARGET_X86_64
363
static inline void gen_op_addq_A0_im(int64_t val)
364
{
365
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
366
}
367
#endif
368
    
369
static void gen_add_A0_im(DisasContext *s, int val)
370
{
371
#ifdef TARGET_X86_64
372
    if (CODE64(s))
373
        gen_op_addq_A0_im(val);
374
    else
375
#endif
376
        gen_op_addl_A0_im(val);
377
}
378

    
379
static inline void gen_op_addl_T0_T1(void)
380
{
381
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
382
}
383

    
384
static inline void gen_op_jmp_T0(void)
385
{
386
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
387
}
388

    
389
static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
390
{
391
    switch(size) {
392
    case 0:
393
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
394
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
395
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
396
        break;
397
    case 1:
398
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
399
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
400
#ifdef TARGET_X86_64
401
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
402
#endif
403
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
404
        break;
405
#ifdef TARGET_X86_64
406
    case 2:
407
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
408
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
409
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
410
        break;
411
#endif
412
    }
413
}
414

    
415
static inline void gen_op_add_reg_T0(int size, int reg)
416
{
417
    switch(size) {
418
    case 0:
419
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
420
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
421
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
422
        break;
423
    case 1:
424
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
425
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
426
#ifdef TARGET_X86_64
427
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
428
#endif
429
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
430
        break;
431
#ifdef TARGET_X86_64
432
    case 2:
433
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
434
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
435
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
436
        break;
437
#endif
438
    }
439
}
440

    
441
static inline void gen_op_set_cc_op(int32_t val)
442
{
443
    tcg_gen_movi_i32(cpu_cc_op, val);
444
}
445

    
446
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
447
{
448
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
449
    if (shift != 0) 
450
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
451
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
452
#ifdef TARGET_X86_64
453
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
454
#endif
455
}
456

    
457
static inline void gen_op_movl_A0_seg(int reg)
458
{
459
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
460
}
461

    
462
static inline void gen_op_addl_A0_seg(int reg)
463
{
464
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
465
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
466
#ifdef TARGET_X86_64
467
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
468
#endif
469
}
470

    
471
#ifdef TARGET_X86_64
472
static inline void gen_op_movq_A0_seg(int reg)
473
{
474
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
475
}
476

    
477
static inline void gen_op_addq_A0_seg(int reg)
478
{
479
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
480
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
481
}
482

    
483
static inline void gen_op_movq_A0_reg(int reg)
484
{
485
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
486
}
487

    
488
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
489
{
490
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
491
    if (shift != 0) 
492
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
493
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
494
}
495
#endif
496

    
497
static inline void gen_op_lds_T0_A0(int idx)
498
{
499
    int mem_index = (idx >> 2) - 1;
500
    switch(idx & 3) {
501
    case 0:
502
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
503
        break;
504
    case 1:
505
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
506
        break;
507
    default:
508
    case 2:
509
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
510
        break;
511
    }
512
}
513

    
514
/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
515
static inline void gen_op_ld_T0_A0(int idx)
516
{
517
    int mem_index = (idx >> 2) - 1;
518
    switch(idx & 3) {
519
    case 0:
520
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
521
        break;
522
    case 1:
523
        tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
524
        break;
525
    case 2:
526
        tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
527
        break;
528
    default:
529
    case 3:
530
        tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
531
        break;
532
    }
533
}
534

    
535
static inline void gen_op_ldu_T0_A0(int idx)
536
{
537
    gen_op_ld_T0_A0(idx);
538
}
539

    
540
static inline void gen_op_ld_T1_A0(int idx)
541
{
542
    int mem_index = (idx >> 2) - 1;
543
    switch(idx & 3) {
544
    case 0:
545
        tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
546
        break;
547
    case 1:
548
        tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
549
        break;
550
    case 2:
551
        tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
552
        break;
553
    default:
554
    case 3:
555
        tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
556
        break;
557
    }
558
}
559

    
560
static inline void gen_op_st_T0_A0(int idx)
561
{
562
    int mem_index = (idx >> 2) - 1;
563
    switch(idx & 3) {
564
    case 0:
565
        tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
566
        break;
567
    case 1:
568
        tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
569
        break;
570
    case 2:
571
        tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
572
        break;
573
    default:
574
    case 3:
575
        tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
576
        break;
577
    }
578
}
579

    
580
static inline void gen_op_st_T1_A0(int idx)
581
{
582
    int mem_index = (idx >> 2) - 1;
583
    switch(idx & 3) {
584
    case 0:
585
        tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
586
        break;
587
    case 1:
588
        tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
589
        break;
590
    case 2:
591
        tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
592
        break;
593
    default:
594
    case 3:
595
        tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
596
        break;
597
    }
598
}
599

    
600
static inline void gen_jmp_im(target_ulong pc)
601
{
602
    tcg_gen_movi_tl(cpu_tmp0, pc);
603
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
604
}
605

    
606
static inline void gen_string_movl_A0_ESI(DisasContext *s)
607
{
608
    int override;
609

    
610
    override = s->override;
611
#ifdef TARGET_X86_64
612
    if (s->aflag == 2) {
613
        if (override >= 0) {
614
            gen_op_movq_A0_seg(override);
615
            gen_op_addq_A0_reg_sN(0, R_ESI);
616
        } else {
617
            gen_op_movq_A0_reg(R_ESI);
618
        }
619
    } else
620
#endif
621
    if (s->aflag) {
622
        /* 32 bit address */
623
        if (s->addseg && override < 0)
624
            override = R_DS;
625
        if (override >= 0) {
626
            gen_op_movl_A0_seg(override);
627
            gen_op_addl_A0_reg_sN(0, R_ESI);
628
        } else {
629
            gen_op_movl_A0_reg(R_ESI);
630
        }
631
    } else {
632
        /* 16 address, always override */
633
        if (override < 0)
634
            override = R_DS;
635
        gen_op_movl_A0_reg(R_ESI);
636
        gen_op_andl_A0_ffff();
637
        gen_op_addl_A0_seg(override);
638
    }
639
}
640

    
641
static inline void gen_string_movl_A0_EDI(DisasContext *s)
642
{
643
#ifdef TARGET_X86_64
644
    if (s->aflag == 2) {
645
        gen_op_movq_A0_reg(R_EDI);
646
    } else
647
#endif
648
    if (s->aflag) {
649
        if (s->addseg) {
650
            gen_op_movl_A0_seg(R_ES);
651
            gen_op_addl_A0_reg_sN(0, R_EDI);
652
        } else {
653
            gen_op_movl_A0_reg(R_EDI);
654
        }
655
    } else {
656
        gen_op_movl_A0_reg(R_EDI);
657
        gen_op_andl_A0_ffff();
658
        gen_op_addl_A0_seg(R_ES);
659
    }
660
}
661

    
662
static inline void gen_op_movl_T0_Dshift(int ot) 
663
{
664
    tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
665
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
666
};
667

    
668
static void gen_extu(int ot, TCGv reg)
669
{
670
    switch(ot) {
671
    case OT_BYTE:
672
        tcg_gen_ext8u_tl(reg, reg);
673
        break;
674
    case OT_WORD:
675
        tcg_gen_ext16u_tl(reg, reg);
676
        break;
677
    case OT_LONG:
678
        tcg_gen_ext32u_tl(reg, reg);
679
        break;
680
    default:
681
        break;
682
    }
683
}
684

    
685
static void gen_exts(int ot, TCGv reg)
686
{
687
    switch(ot) {
688
    case OT_BYTE:
689
        tcg_gen_ext8s_tl(reg, reg);
690
        break;
691
    case OT_WORD:
692
        tcg_gen_ext16s_tl(reg, reg);
693
        break;
694
    case OT_LONG:
695
        tcg_gen_ext32s_tl(reg, reg);
696
        break;
697
    default:
698
        break;
699
    }
700
}
701

    
702
static inline void gen_op_jnz_ecx(int size, int label1)
703
{
704
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
705
    gen_extu(size + 1, cpu_tmp0);
706
    tcg_gen_brcond_tl(TCG_COND_NE, cpu_tmp0, tcg_const_tl(0), label1);
707
}
708

    
709
static inline void gen_op_jz_ecx(int size, int label1)
710
{
711
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
712
    gen_extu(size + 1, cpu_tmp0);
713
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), label1);
714
}
715

    
716
static void *helper_in_func[3] = {
717
    helper_inb,
718
    helper_inw,
719
    helper_inl,
720
};
721

    
722
static void *helper_out_func[3] = {
723
    helper_outb,
724
    helper_outw,
725
    helper_outl,
726
};
727

    
728
static void *gen_check_io_func[3] = {
729
    helper_check_iob,
730
    helper_check_iow,
731
    helper_check_iol,
732
};
733

    
734
static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
735
                         uint32_t svm_flags)
736
{
737
    int state_saved;
738
    target_ulong next_eip;
739

    
740
    state_saved = 0;
741
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
742
        if (s->cc_op != CC_OP_DYNAMIC)
743
            gen_op_set_cc_op(s->cc_op);
744
        gen_jmp_im(cur_eip);
745
        state_saved = 1;
746
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
747
        tcg_gen_helper_0_1(gen_check_io_func[ot],
748
                           cpu_tmp2_i32);
749
    }
750
    if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
751
        if (!state_saved) {
752
            if (s->cc_op != CC_OP_DYNAMIC)
753
                gen_op_set_cc_op(s->cc_op);
754
            gen_jmp_im(cur_eip);
755
            state_saved = 1;
756
        }
757
        svm_flags |= (1 << (4 + ot));
758
        next_eip = s->pc - s->cs_base;
759
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
760
        tcg_gen_helper_0_3(helper_svm_check_io,
761
                           cpu_tmp2_i32,
762
                           tcg_const_i32(svm_flags),
763
                           tcg_const_i32(next_eip - cur_eip));
764
    }
765
}
766

    
767
static inline void gen_movs(DisasContext *s, int ot)
768
{
769
    gen_string_movl_A0_ESI(s);
770
    gen_op_ld_T0_A0(ot + s->mem_index);
771
    gen_string_movl_A0_EDI(s);
772
    gen_op_st_T0_A0(ot + s->mem_index);
773
    gen_op_movl_T0_Dshift(ot);
774
    gen_op_add_reg_T0(s->aflag, R_ESI);
775
    gen_op_add_reg_T0(s->aflag, R_EDI);
776
}
777

    
778
static inline void gen_update_cc_op(DisasContext *s)
779
{
780
    if (s->cc_op != CC_OP_DYNAMIC) {
781
        gen_op_set_cc_op(s->cc_op);
782
        s->cc_op = CC_OP_DYNAMIC;
783
    }
784
}
785

    
786
static void gen_op_update1_cc(void)
787
{
788
    tcg_gen_discard_tl(cpu_cc_src);
789
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
790
}
791

    
792
static void gen_op_update2_cc(void)
793
{
794
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
795
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
796
}
797

    
798
static inline void gen_op_cmpl_T0_T1_cc(void)
799
{
800
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
801
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
802
}
803

    
804
static inline void gen_op_testl_T0_T1_cc(void)
805
{
806
    tcg_gen_discard_tl(cpu_cc_src);
807
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
808
}
809

    
810
static void gen_op_update_neg_cc(void)
811
{
812
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
813
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
814
}
815

    
816
/* compute eflags.C to reg */
817
static void gen_compute_eflags_c(TCGv reg)
818
{
819
#if TCG_TARGET_REG_BITS == 32
820
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
821
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 
822
                     (long)cc_table + offsetof(CCTable, compute_c));
823
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
824
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE, 
825
                 1, &cpu_tmp2_i32, 0, NULL);
826
#else
827
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
828
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
829
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64, 
830
                     (long)cc_table + offsetof(CCTable, compute_c));
831
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
832
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE, 
833
                 1, &cpu_tmp2_i32, 0, NULL);
834
#endif
835
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
836
}
837

    
838
/* compute all eflags to cc_src */
839
static void gen_compute_eflags(TCGv reg)
840
{
841
#if TCG_TARGET_REG_BITS == 32
842
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
843
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 
844
                     (long)cc_table + offsetof(CCTable, compute_all));
845
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
846
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE, 
847
                 1, &cpu_tmp2_i32, 0, NULL);
848
#else
849
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
850
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
851
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64, 
852
                     (long)cc_table + offsetof(CCTable, compute_all));
853
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
854
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE, 
855
                 1, &cpu_tmp2_i32, 0, NULL);
856
#endif
857
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
858
}
859

    
860
static inline void gen_setcc_slow_T0(int op)
861
{
862
    switch(op) {
863
    case JCC_O:
864
        gen_compute_eflags(cpu_T[0]);
865
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
866
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
867
        break;
868
    case JCC_B:
869
        gen_compute_eflags_c(cpu_T[0]);
870
        break;
871
    case JCC_Z:
872
        gen_compute_eflags(cpu_T[0]);
873
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
874
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
875
        break;
876
    case JCC_BE:
877
        gen_compute_eflags(cpu_tmp0);
878
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
879
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
880
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
881
        break;
882
    case JCC_S:
883
        gen_compute_eflags(cpu_T[0]);
884
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
885
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
886
        break;
887
    case JCC_P:
888
        gen_compute_eflags(cpu_T[0]);
889
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
890
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
891
        break;
892
    case JCC_L:
893
        gen_compute_eflags(cpu_tmp0);
894
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
895
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
896
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
897
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
898
        break;
899
    default:
900
    case JCC_LE:
901
        gen_compute_eflags(cpu_tmp0);
902
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
903
        tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
904
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
905
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
906
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
907
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
908
        break;
909
    }
910
}
911

    
912
/* return true if setcc_slow is not needed (WARNING: must be kept in
913
   sync with gen_jcc1) */
914
static int is_fast_jcc_case(DisasContext *s, int b)
915
{
916
    int jcc_op;
917
    jcc_op = (b >> 1) & 7;
918
    switch(s->cc_op) {
919
        /* we optimize the cmp/jcc case */
920
    case CC_OP_SUBB:
921
    case CC_OP_SUBW:
922
    case CC_OP_SUBL:
923
    case CC_OP_SUBQ:
924
        if (jcc_op == JCC_O || jcc_op == JCC_P)
925
            goto slow_jcc;
926
        break;
927

    
928
        /* some jumps are easy to compute */
929
    case CC_OP_ADDB:
930
    case CC_OP_ADDW:
931
    case CC_OP_ADDL:
932
    case CC_OP_ADDQ:
933

    
934
    case CC_OP_LOGICB:
935
    case CC_OP_LOGICW:
936
    case CC_OP_LOGICL:
937
    case CC_OP_LOGICQ:
938

    
939
    case CC_OP_INCB:
940
    case CC_OP_INCW:
941
    case CC_OP_INCL:
942
    case CC_OP_INCQ:
943

    
944
    case CC_OP_DECB:
945
    case CC_OP_DECW:
946
    case CC_OP_DECL:
947
    case CC_OP_DECQ:
948

    
949
    case CC_OP_SHLB:
950
    case CC_OP_SHLW:
951
    case CC_OP_SHLL:
952
    case CC_OP_SHLQ:
953
        if (jcc_op != JCC_Z && jcc_op != JCC_S)
954
            goto slow_jcc;
955
        break;
956
    default:
957
    slow_jcc:
958
        return 0;
959
    }
960
    return 1;
961
}
962

    
963
/* generate a conditional jump to label 'l1' according to jump opcode
964
   value 'b'. In the fast case, T0 is guaranted not to be used. */
965
static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
966
{
967
    int inv, jcc_op, size, cond;
968
    TCGv t0;
969

    
970
    inv = b & 1;
971
    jcc_op = (b >> 1) & 7;
972

    
973
    switch(cc_op) {
974
        /* we optimize the cmp/jcc case */
975
    case CC_OP_SUBB:
976
    case CC_OP_SUBW:
977
    case CC_OP_SUBL:
978
    case CC_OP_SUBQ:
979
        
980
        size = cc_op - CC_OP_SUBB;
981
        switch(jcc_op) {
982
        case JCC_Z:
983
        fast_jcc_z:
984
            switch(size) {
985
            case 0:
986
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
987
                t0 = cpu_tmp0;
988
                break;
989
            case 1:
990
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
991
                t0 = cpu_tmp0;
992
                break;
993
#ifdef TARGET_X86_64
994
            case 2:
995
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
996
                t0 = cpu_tmp0;
997
                break;
998
#endif
999
            default:
1000
                t0 = cpu_cc_dst;
1001
                break;
1002
            }
1003
            tcg_gen_brcond_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 
1004
                              tcg_const_tl(0), l1);
1005
            break;
1006
        case JCC_S:
1007
        fast_jcc_s:
1008
            switch(size) {
1009
            case 0:
1010
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
1011
                tcg_gen_brcond_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0, 
1012
                                  tcg_const_tl(0), l1);
1013
                break;
1014
            case 1:
1015
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
1016
                tcg_gen_brcond_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0, 
1017
                                  tcg_const_tl(0), l1);
1018
                break;
1019
#ifdef TARGET_X86_64
1020
            case 2:
1021
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
1022
                tcg_gen_brcond_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0, 
1023
                                  tcg_const_tl(0), l1);
1024
                break;
1025
#endif
1026
            default:
1027
                tcg_gen_brcond_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst, 
1028
                                  tcg_const_tl(0), l1);
1029
                break;
1030
            }
1031
            break;
1032
            
1033
        case JCC_B:
1034
            cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
1035
            goto fast_jcc_b;
1036
        case JCC_BE:
1037
            cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
1038
        fast_jcc_b:
1039
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1040
            switch(size) {
1041
            case 0:
1042
                t0 = cpu_tmp0;
1043
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
1044
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
1045
                break;
1046
            case 1:
1047
                t0 = cpu_tmp0;
1048
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
1049
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
1050
                break;
1051
#ifdef TARGET_X86_64
1052
            case 2:
1053
                t0 = cpu_tmp0;
1054
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
1055
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
1056
                break;
1057
#endif
1058
            default:
1059
                t0 = cpu_cc_src;
1060
                break;
1061
            }
1062
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1063
            break;
1064
            
1065
        case JCC_L:
1066
            cond = inv ? TCG_COND_GE : TCG_COND_LT;
1067
            goto fast_jcc_l;
1068
        case JCC_LE:
1069
            cond = inv ? TCG_COND_GT : TCG_COND_LE;
1070
        fast_jcc_l:
1071
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1072
            switch(size) {
1073
            case 0:
1074
                t0 = cpu_tmp0;
1075
                tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
1076
                tcg_gen_ext8s_tl(t0, cpu_cc_src);
1077
                break;
1078
            case 1:
1079
                t0 = cpu_tmp0;
1080
                tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
1081
                tcg_gen_ext16s_tl(t0, cpu_cc_src);
1082
                break;
1083
#ifdef TARGET_X86_64
1084
            case 2:
1085
                t0 = cpu_tmp0;
1086
                tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
1087
                tcg_gen_ext32s_tl(t0, cpu_cc_src);
1088
                break;
1089
#endif
1090
            default:
1091
                t0 = cpu_cc_src;
1092
                break;
1093
            }
1094
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1095
            break;
1096
            
1097
        default:
1098
            goto slow_jcc;
1099
        }
1100
        break;
1101
        
1102
        /* some jumps are easy to compute */
1103
    case CC_OP_ADDB:
1104
    case CC_OP_ADDW:
1105
    case CC_OP_ADDL:
1106
    case CC_OP_ADDQ:
1107
        
1108
    case CC_OP_ADCB:
1109
    case CC_OP_ADCW:
1110
    case CC_OP_ADCL:
1111
    case CC_OP_ADCQ:
1112
        
1113
    case CC_OP_SBBB:
1114
    case CC_OP_SBBW:
1115
    case CC_OP_SBBL:
1116
    case CC_OP_SBBQ:
1117
        
1118
    case CC_OP_LOGICB:
1119
    case CC_OP_LOGICW:
1120
    case CC_OP_LOGICL:
1121
    case CC_OP_LOGICQ:
1122
        
1123
    case CC_OP_INCB:
1124
    case CC_OP_INCW:
1125
    case CC_OP_INCL:
1126
    case CC_OP_INCQ:
1127
        
1128
    case CC_OP_DECB:
1129
    case CC_OP_DECW:
1130
    case CC_OP_DECL:
1131
    case CC_OP_DECQ:
1132
        
1133
    case CC_OP_SHLB:
1134
    case CC_OP_SHLW:
1135
    case CC_OP_SHLL:
1136
    case CC_OP_SHLQ:
1137
        
1138
    case CC_OP_SARB:
1139
    case CC_OP_SARW:
1140
    case CC_OP_SARL:
1141
    case CC_OP_SARQ:
1142
        switch(jcc_op) {
1143
        case JCC_Z:
1144
            size = (cc_op - CC_OP_ADDB) & 3;
1145
            goto fast_jcc_z;
1146
        case JCC_S:
1147
            size = (cc_op - CC_OP_ADDB) & 3;
1148
            goto fast_jcc_s;
1149
        default:
1150
            goto slow_jcc;
1151
        }
1152
        break;
1153
    default:
1154
    slow_jcc:
1155
        gen_setcc_slow_T0(jcc_op);
1156
        tcg_gen_brcond_tl(inv ? TCG_COND_EQ : TCG_COND_NE, 
1157
                          cpu_T[0], tcg_const_tl(0), l1);
1158
        break;
1159
    }
1160
}
1161

    
1162
/* XXX: does not work with gdbstub "ice" single step - not a
1163
   serious problem */
1164
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
1165
{
1166
    int l1, l2;
1167

    
1168
    l1 = gen_new_label();
1169
    l2 = gen_new_label();
1170
    gen_op_jnz_ecx(s->aflag, l1);
1171
    gen_set_label(l2);
1172
    gen_jmp_tb(s, next_eip, 1);
1173
    gen_set_label(l1);
1174
    return l2;
1175
}
1176

    
1177
/* STOS: store EAX (in T0) at the address in EDI, then step EDI by the
   operand size in the direction given by DF. */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    /* load +/- element size into T0 (direction depends on DF) */
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
1185

    
1186
/* LODS: load the element at the address in ESI into EAX, then step ESI
   by the operand size in the direction given by DF. */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    /* load +/- element size into T0 (direction depends on DF) */
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}
1194

    
1195
/* SCAS: compare EAX (T0) against the element at the address in EDI,
   set the condition codes like CMP, then step EDI. */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    /* load +/- element size into T0 (direction depends on DF) */
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
1204

    
1205
/* CMPS: compare the element at [ESI] against the one at [EDI], set the
   condition codes like CMP, then step both index registers. */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    /* load +/- element size into T0 (direction depends on DF) */
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
1216

    
1217
/* INS: read from the I/O port in DX and store the value at [EDI],
   then step EDI. */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    /* port numbers are 16 bits */
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    /* dispatch on operand size to the matching port-read helper */
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    /* load +/- element size into T0 (direction depends on DF) */
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
1232

    
1233
/* OUTS: load the element at [ESI] and write it to the I/O port in DX,
   then step ESI. */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    /* port numbers are 16 bits */
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    /* dispatch on operand size to the matching port-write helper */
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    /* load +/- element size into T0 (direction depends on DF) */
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}
1247

    
1248
/* same method as Valgrind : we generate jumps to current or next
   instruction */
/* Expand to a rep-prefixed wrapper around gen_<op>: test ECX (exit to
   next_eip when zero), emit one iteration, decrement ECX, and loop by
   re-translating the current instruction (jump to cur_eip). */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
1265

    
1266
/* Like GEN_REPZ but additionally tests ZF after each iteration so the
   same expansion serves both REPZ (nz == 0) and REPNZ (nz == 1) forms
   of scas/cmps. */
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2);                \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
1283

    
1284
/* Instantiate the rep wrappers; scas/cmps use the GEN_REPZ2 variant
   because they also terminate on the ZF condition. */
GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
1291

    
1292
/* FPU ST0 <- ST0 op FT0 helpers, indexed by the 3-bit FP operation
   field of the x87 opcode (fcom appears twice: fcom and fcomp share
   the arithmetic part). */
static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,
};
1302

    
1303
/* NOTE the exception in "r" op ordering */
/* FPU ST(n) <- ST(n) op ST0 helpers, same 3-bit index as above; the
   sub/subr and div/divr pairs are swapped relative to the ST0 table,
   and the compare slots are unused (NULL). */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    NULL,
    NULL,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
};
1314

    
1315
/* if d == OR_TMP0, it means memory operand (address in A0) */
/* Generate code for a two-operand ALU op 'op' (operand size 'ot').
   Operand 0 is read from register 'd' (or memory via A0), operand 1 is
   expected in T1; the result is written back to the same place and the
   lazy condition-code state (cc_src/cc_dst/cc_op) is updated. */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        /* needs the current carry: flush the known cc_op first */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        /* cc_op is computed at runtime: carry << 2 selects the ADC
           group (CC_OP enum groups are 4 entries apart, see the
           ADDB..ADDQ / ADCB..ADCQ case lists above) */
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_SBBL:
        /* same scheme as ADC, with subtraction and the SUB/SBB groups */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_ADDB + ot;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    default:
        /* unknown ops fall through to AND on purpose */
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_CMPL:
        /* CMP: flags only, no writeback */
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    }
}
1410

    
1411
/* if d == OR_TMP0, it means memory operand (address in A0) */
/* Generate INC (c > 0) or DEC (c <= 0) of operand 'd' with size 'ot'.
   INC/DEC preserve CF, so the previous carry is captured into cc_src
   before the flags state is switched to the INC/DEC cc_op group. */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    /* flush the known cc_op so the carry can be computed below */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    /* save the unchanged CF in cc_src for the lazy flags evaluation */
    gen_compute_eflags_c(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
1434

    
1435
/* XXX: add faster immediate case */
/* Generate SHL/SHR/SAR of operand 'op1' by the count in T1 (size 'ot',
   is_right selects right shifts, is_arith selects SAR over SHR).  A
   zero count must leave the flags untouched, so the cc update is
   guarded by a runtime branch and cc_op becomes dynamic. */
static void gen_shift_rm_T1(DisasContext *s, int ot, int op1, 
                            int is_right, int is_arith)
{
    target_ulong mask;
    int shift_label;
    
    /* shift counts are masked to 6 bits for 64-bit ops, 5 otherwise */
    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    /* count - 1: the (count-1)-shifted value in T3 keeps the last bit
       shifted out, which the lazy flags code needs for CF */
    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);

    if (is_right) {
        if (is_arith) {
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);
        
    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* skip the cc update entirely when the masked count is zero */
    shift_label = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), shift_label);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
        
    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1495

    
1496
/* Shift left by arg2 bits when arg2 is non-negative, otherwise shift
   right by -arg2 bits (a "signed shift amount" helper). */
static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
{
    if (arg2 < 0) {
        tcg_gen_shri_tl(ret, arg1, -arg2);
    } else {
        tcg_gen_shli_tl(ret, arg1, arg2);
    }
}
1503

    
1504
/* XXX: add faster immediate case */
/* Generate ROL/ROR of operand 'op1' by the count in T1 (size 'ot',
   is_right selects ROR).  The rotate is built from two shifts OR-ed
   together; flags (CF and OF only) are updated at runtime and only
   when the masked count is non-zero, so cc_op becomes dynamic. */
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1, 
                          int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;
    
    /* shift counts are masked to 6 bits for 64-bit ops, 5 otherwise */
    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label1);
    
    /* reduce the count modulo the data width for sub-word sizes */
    if (ot <= OT_WORD)
        tcg_gen_andi_tl(cpu_tmp0, cpu_T[1], (1 << (3 + ot)) - 1);
    else
        tcg_gen_mov_tl(cpu_tmp0, cpu_T[1]);
    
    gen_extu(ot, cpu_T[0]);
    /* keep the pre-rotate value in T3 for the OF computation below */
    tcg_gen_mov_tl(cpu_T3, cpu_T[0]);

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
    if (is_right) {
        tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    } else {
        tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    }
    tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);
    
    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* skip the flags update when the masked count is zero */
    label2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label2);

    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    /* OF = xor of the two top bits of old and new value, moved to the
       CC_O bit position */
    tcg_gen_xor_tl(cpu_tmp0, cpu_T3, cpu_T[0]);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    /* CF = for ROR the top result bit, for ROL the bottom result bit */
    if (is_right) {
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], data_bits - 1);
    }
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
    
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
        
    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1583

    
1584
/* RCL/RCR helpers, indexed by ot + (is_right * 4): RCL in the first
   four slots, RCR in the last four.  The 64-bit entries are NULL on
   32-bit targets (X86_64_ONLY). */
static void *helper_rotc[8] = {
    helper_rclb,
    helper_rclw,
    helper_rcll,
    X86_64_ONLY(helper_rclq),
    helper_rcrb,
    helper_rcrw,
    helper_rcrl,
    X86_64_ONLY(helper_rcrq),
};
1594

    
1595
/* XXX: add faster immediate = 1 case */
/* Generate RCL/RCR (rotate through carry) of operand 'op1' by the
   count in T1 via the helper_rotc table; flags are committed only
   when T3 != -1 after the call (NOTE(review): the helpers appear to
   leave -1 in cpu_T3 when the flags were not modified — confirm
   against the rcl/rcr helper implementations). */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1, 
                           int is_right)
{
    int label1;

    /* the helper needs the current flags: flush the known cc_op */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);
    
    tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
                       cpu_T[0], cpu_T[0], cpu_T[1]);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(-1), label1);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
        
    gen_set_label(label1);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1629

    
1630
/* XXX: add faster immediate case */
/* Generate SHLD/SHRD (double-precision shift) of operand 'op1' (size
   'ot') by the count in T3, shifting bits in from T1.  A zero count
   leaves value and flags untouched; flag updates are guarded by a
   runtime branch, so cc_op becomes dynamic. */
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1, 
                                int is_right)
{
    int label1, label2, data_bits;
    target_ulong mask;

    /* shift counts are masked to 6 bits for 64-bit ops, 5 otherwise */
    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);
    
    /* tmp4 = value shifted by count-1: keeps the last bit shifted out
       for the CF computation in the lazy flags code */
    tcg_gen_addi_tl(cpu_tmp5, cpu_T3, -1);
    if (ot == OT_WORD) {
        /* Note: we implement the Intel behaviour for shift count > 16 */
        if (is_right) {
            /* build a 32-bit value T1:T0 and shift it as one unit */
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
            tcg_gen_shli_tl(cpu_tmp0, cpu_T[1], 16);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);

            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
            
            /* only needed if count > 16, but a test would complicate */
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
            tcg_gen_shl_tl(cpu_tmp0, cpu_T[0], cpu_tmp5);

            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);

            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        } else {
            /* XXX: not optimal */
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
            tcg_gen_shli_tl(cpu_T[1], cpu_T[1], 16);
            tcg_gen_or_tl(cpu_T[1], cpu_T[1], cpu_T[0]);
            tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
            
            tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
            tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
            tcg_gen_shr_tl(cpu_tmp6, cpu_T[1], cpu_tmp0);
            tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);

            tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
            tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        data_bits = 8 << ot;
        if (is_right) {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);

            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);

            /* result = (T0 >> count) | (T1 << (width - count)) */
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
            tcg_gen_shl_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
            
        } else {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);

            tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
            
            /* result = (T0 << count) | (T1 >> (width - count)) */
            tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
            tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    }
    tcg_gen_mov_tl(cpu_T[1], cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);
    
    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* skip the flags update when the masked count is zero */
    label2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label2);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
    if (is_right) {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    } else {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
    }
    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1740

    
1741
/* Dispatch a shift/rotate group op: load the count from register 's'
   into T1 (unless it is already there as OR_TMP1) and forward to the
   matching generator for operand 'd' of size 'ot'. */
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);

    if (op == OP_ROL) {
        gen_rot_rm_T1(s1, ot, d, 0);
    } else if (op == OP_ROR) {
        gen_rot_rm_T1(s1, ot, d, 1);
    } else if (op == OP_SHL || op == OP_SHL1) {
        gen_shift_rm_T1(s1, ot, d, 0, 0);
    } else if (op == OP_SHR) {
        gen_shift_rm_T1(s1, ot, d, 1, 0);
    } else if (op == OP_SAR) {
        gen_shift_rm_T1(s1, ot, d, 1, 1);
    } else if (op == OP_RCL) {
        gen_rotc_rm_T1(s1, ot, d, 0);
    } else if (op == OP_RCR) {
        gen_rotc_rm_T1(s1, ot, d, 1);
    }
}
1770

    
1771
/* Shift/rotate by an immediate count 'c': load the count into T1 and
   reuse the generic gen_shift path. */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
1777

    
1778
/* Decode the memory operand of a modrm byte (consuming any SIB byte
   and displacement from the instruction stream) and generate code that
   leaves the effective address, including any segment base, in A0.
   *reg_ptr/*offset_ptr always come back as OR_A0/0. */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    /* an explicit segment prefix forces the segment-base addition */
    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        /* base == 4 means a SIB byte follows */
        if (base == 4) {
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base register, disp32 only */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    /* x86-64 RIP-relative addressing */
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0) {
                    gen_op_addq_A0_im(disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* EBP/ESP-based addresses default to SS, others to DS */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        /* 16-bit addressing */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* mod=0, rm=6: disp16 only, no registers */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        /* 16-bit base/index register combinations per the rm field */
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        /* 16-bit effective addresses wrap at 64K */
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* BP-based forms default to SS, others to DS */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
1961

    
1962
/* Skip the memory operand of a multi-byte NOP (0F 1F /0 etc.) without
   generating any code: only s->pc is advanced past the SIB byte and
   displacement implied by the modrm byte. */
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base;

    mod = (modrm >> 6) & 3;
    if (mod == 3)
        return; /* register operand: nothing follows the modrm byte */
    rm = modrm & 7;

    if (s->aflag) {
        /* 32-bit addressing: rm == 4 means a SIB byte follows */
        base = rm;
        if (base == 4)
            base = ldub_code(s->pc++) & 7;

        if (mod == 1) {
            s->pc += 1;             /* disp8 */
        } else if (mod == 2 || base == 5) {
            s->pc += 4;             /* disp32 (also mod == 0 with base == 5) */
        }
    } else {
        /* 16-bit addressing */
        if (mod == 1) {
            s->pc += 1;             /* disp8 */
        } else if (mod == 2 || rm == 6) {
            s->pc += 2;             /* disp16 (also mod == 0 with rm == 6) */
        }
    }
}
2011

    
2012
/* used for LEA and MOV AX, mem */
2013
static void gen_add_A0_ds_seg(DisasContext *s)
2014
{
2015
    int override, must_add_seg;
2016
    must_add_seg = s->addseg;
2017
    override = R_DS;
2018
    if (s->override >= 0) {
2019
        override = s->override;
2020
        must_add_seg = 1;
2021
    } else {
2022
        override = R_DS;
2023
    }
2024
    if (must_add_seg) {
2025
#ifdef TARGET_X86_64
2026
        if (CODE64(s)) {
2027
            gen_op_addq_A0_seg(override);
2028
        } else
2029
#endif
2030
        {
2031
            gen_op_addl_A0_seg(override);
2032
        }
2033
    }
2034
}
2035

    
2036
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
2037
   OR_TMP0 */
2038
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
2039
{
2040
    int mod, rm, opreg, disp;
2041

    
2042
    mod = (modrm >> 6) & 3;
2043
    rm = (modrm & 7) | REX_B(s);
2044
    if (mod == 3) {
2045
        if (is_store) {
2046
            if (reg != OR_TMP0)
2047
                gen_op_mov_TN_reg(ot, 0, reg);
2048
            gen_op_mov_reg_T0(ot, rm);
2049
        } else {
2050
            gen_op_mov_TN_reg(ot, 0, rm);
2051
            if (reg != OR_TMP0)
2052
                gen_op_mov_reg_T0(ot, reg);
2053
        }
2054
    } else {
2055
        gen_lea_modrm(s, modrm, &opreg, &disp);
2056
        if (is_store) {
2057
            if (reg != OR_TMP0)
2058
                gen_op_mov_TN_reg(ot, 0, reg);
2059
            gen_op_st_T0_A0(ot + s->mem_index);
2060
        } else {
2061
            gen_op_ld_T0_A0(ot + s->mem_index);
2062
            if (reg != OR_TMP0)
2063
                gen_op_mov_reg_T0(ot, reg);
2064
        }
2065
    }
2066
}
2067

    
2068
/* Fetch an immediate of operand size 'ot' from the instruction stream
   and advance s->pc past it. Unknown sizes are treated as OT_LONG. */
static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t val;

    if (ot == OT_BYTE) {
        val = ldub_code(s->pc);
        s->pc += 1;
    } else if (ot == OT_WORD) {
        val = lduw_code(s->pc);
        s->pc += 2;
    } else {
        /* OT_LONG and any other value */
        val = ldl_code(s->pc);
        s->pc += 4;
    }
    return val;
}
2089

    
2090
static inline int insn_const_size(unsigned int ot)
2091
{
2092
    if (ot <= OT_LONG)
2093
        return 1 << ot;
2094
    else
2095
        return 4;
2096
}
2097

    
2098
/* Emit a jump to 'eip' that chains directly to the next translation
   block when the target lies on the same guest page(s) as the current
   TB; otherwise fall back to a full end-of-block exit. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        /* encode the TB pointer plus the chain slot index in the return value */
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2118

    
2119
/* Emit a conditional jump on condition code 'b': taken target is 'val',
   fall-through target is 'next_eip'. Uses chained TBs when jump
   optimization is enabled, otherwise a plain two-way branch + EOB. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    int l1, l2, cc_op;

    /* remember the cc state for gen_jcc1; flush it to the CPU state
       since both continuations must see consistent flags */
    cc_op = s->cc_op;
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
    if (s->jmp_opt) {
        l1 = gen_new_label();
        gen_jcc1(s, cc_op, b, l1);
        /* not taken: chain to the next instruction's TB */
        gen_goto_tb(s, 0, next_eip);

        /* taken: chain to the branch target's TB */
        gen_set_label(l1);
        gen_goto_tb(s, 1, val);
        s->is_jmp = 3;
    } else {
        /* unoptimized case: set EIP according to the condition, then
           end the block */
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_jcc1(s, cc_op, b, l1);

        gen_jmp_im(next_eip);
        tcg_gen_br(l2);

        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
2153

    
2154
/* Compute condition 'b' into T0 as 0 or 1 (SETcc). */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op, l1;

    if (is_fast_jcc_case(s, b)) {
        /* nominal case: we use a jump */
        tcg_gen_movi_tl(cpu_T[0], 0);
        l1 = gen_new_label();
        gen_jcc1(s, s->cc_op, b ^ 1, l1);
        tcg_gen_movi_tl(cpu_T[0], 1);
        gen_set_label(l1);
    } else {
        /* slow case: it is more efficient not to generate a jump,
           although it is questionable whether this optimization is
           worth it */
        inv = b & 1;          /* low bit inverts the condition */
        jcc_op = (b >> 1) & 7;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_setcc_slow_T0(jcc_op);
        if (inv) {
            tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
        }
    }
}
2179

    
2180
/* Load the selector of segment register 'seg_reg' into T0. */
static inline void gen_op_movl_T0_seg(int seg_reg)
{
    tcg_gen_ld32u_tl(cpu_T[0], cpu_env, 
                     offsetof(CPUX86State,segs[seg_reg].selector));
}
2185

    
2186
/* Store T0 into segment register 'seg_reg' using real-mode/vm86
   semantics: base = selector << 4, no descriptor lookup. */
static inline void gen_op_movl_seg_T0_vm(int seg_reg)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
    tcg_gen_st32_tl(cpu_T[0], cpu_env, 
                    offsetof(CPUX86State,segs[seg_reg].selector));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
    tcg_gen_st_tl(cpu_T[0], cpu_env, 
                  offsetof(CPUX86State,segs[seg_reg].base));
}
2195

    
2196
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* protected mode: the helper performs the descriptor load and
           may raise an exception, so flags and EIP must be up to date */
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        gen_op_movl_seg_T0_vm(seg_reg);
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
2219

    
2220
static inline int svm_is_rep(int prefixes)
2221
{
2222
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2223
}
2224

    
2225
static inline int
2226
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2227
                              uint32_t type, uint64_t param)
2228
{
2229
    if(!(s->flags & (INTERCEPT_SVM_MASK)))
2230
        /* no SVM activated */
2231
        return 0;
2232
    switch(type) {
2233
        /* CRx and DRx reads/writes */
2234
        case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
2235
            if (s->cc_op != CC_OP_DYNAMIC) {
2236
                gen_op_set_cc_op(s->cc_op);
2237
            }
2238
            gen_jmp_im(pc_start - s->cs_base);
2239
            tcg_gen_helper_0_2(helper_svm_check_intercept_param, 
2240
                               tcg_const_i32(type), tcg_const_i64(param));
2241
            /* this is a special case as we do not know if the interception occurs
2242
               so we assume there was none */
2243
            return 0;
2244
        case SVM_EXIT_MSR:
2245
            if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
2246
                if (s->cc_op != CC_OP_DYNAMIC) {
2247
                    gen_op_set_cc_op(s->cc_op);
2248
                }
2249
                gen_jmp_im(pc_start - s->cs_base);
2250
                tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2251
                                   tcg_const_i32(type), tcg_const_i64(param));
2252
                /* this is a special case as we do not know if the interception occurs
2253
                   so we assume there was none */
2254
                return 0;
2255
            }
2256
            break;
2257
        default:
2258
            if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
2259
                if (s->cc_op != CC_OP_DYNAMIC) {
2260
                    gen_op_set_cc_op(s->cc_op);
2261
                }
2262
                gen_jmp_im(pc_start - s->cs_base);
2263
                tcg_gen_helper_0_2(helper_vmexit,
2264
                                   tcg_const_i32(type), tcg_const_i64(param));
2265
                /* we can optimize this one so TBs don't get longer
2266
                   than up to vmexit */
2267
                gen_eob(s);
2268
                return 1;
2269
            }
2270
    }
2271
    return 0;
2272
}
2273

    
2274
/* Convenience wrapper: SVM intercept check with a zero parameter. */
static inline int
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    return gen_svm_check_intercept_param(s, pc_start, type, 0);
}
2279

    
2280
/* Add 'addend' to the stack pointer using the width implied by the
   current code size and SS attributes. */
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_add_reg_im(2, R_ESP, addend);  /* 64 bit RSP */
    } else
#endif
    if (s->ss32) {
        gen_op_add_reg_im(1, R_ESP, addend);  /* 32 bit ESP */
    } else {
        gen_op_add_reg_im(0, R_ESP, addend);  /* 16 bit SP */
    }
}
2293

    
2294
/* generate a push of T0. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            /* default 64 bit operand size */
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            /* 16 bit operand size override */
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                /* keep the unsegmented ESP value in T1 for the final
                   stack pointer write-back */
                tcg_gen_mov_tl(cpu_T[1], cpu_A0);
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            /* 16 bit stack: wrap the offset and always add the SS base */
            gen_op_andl_A0_ffff();
            tcg_gen_mov_tl(cpu_T[1], cpu_A0);
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        /* write back the decremented stack pointer, from A0 when it was
           not rebased by a segment addition, otherwise from T1 */
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
2333

    
2334
/* generate a push. It depends on ss32, addseg and dflag */
2335
/* slower version for T1, only used for call Ev */
2336
static void gen_push_T1(DisasContext *s)
2337
{
2338
#ifdef TARGET_X86_64
2339
    if (CODE64(s)) {
2340
        gen_op_movq_A0_reg(R_ESP);
2341
        if (s->dflag) {
2342
            gen_op_addq_A0_im(-8);
2343
            gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2344
        } else {
2345
            gen_op_addq_A0_im(-2);
2346
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
2347
        }
2348
        gen_op_mov_reg_A0(2, R_ESP);
2349
    } else
2350
#endif
2351
    {
2352
        gen_op_movl_A0_reg(R_ESP);
2353
        if (!s->dflag)
2354
            gen_op_addl_A0_im(-2);
2355
        else
2356
            gen_op_addl_A0_im(-4);
2357
        if (s->ss32) {
2358
            if (s->addseg) {
2359
                gen_op_addl_A0_seg(R_SS);
2360
            }
2361
        } else {
2362
            gen_op_andl_A0_ffff();
2363
            gen_op_addl_A0_seg(R_SS);
2364
        }
2365
        gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2366

    
2367
        if (s->ss32 && !s->addseg)
2368
            gen_op_mov_reg_A0(1, R_ESP);
2369
        else
2370
            gen_stack_update(s, (-2) << s->dflag);
2371
    }
2372
}
2373

    
2374
/* two step pop is necessary for precise exceptions: load the stack top
   into T0 here; the ESP adjustment is done separately by
   gen_pop_update() once no fault can occur */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            /* 16 bit stack: wrap the offset and always add the SS base */
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
    }
}
2395

    
2396
/* Second half of a pop: adjust the stack pointer by the operand size
   (8 in 64-bit default mode, else 2 << dflag). */
static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
        return;
    }
#endif
    gen_stack_update(s, 2 << s->dflag);
}
2407

    
2408
/* Load the current stack top address into A0 (segmented) and keep the
   unsegmented offset in T1. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
2417

    
2418
/* PUSHA/PUSHAD: push all 8 general registers, EDI first in memory.
   NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-16 <<  s->dflag);  /* space for 8 regs of 2/4 bytes */
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);     /* final ESP value */
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* store EAX..EDI from the highest address down */
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
2436

    
2437
/* POPA/POPAD: pop all 8 general registers (the saved ESP is discarded).
   NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 <<  s->dflag);  /* final ESP */
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
2458

    
2459
/* ENTER: push EBP, optionally copy 'level' display entries via a
   helper, set EBP to the new frame and reserve 'esp_addend' bytes. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;  /* architectural: nesting level is mod 32 */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);  /* frame pointer candidate */

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter64_level,
                               tcg_const_i32(level),
                               tcg_const_i32((ot == OT_QUAD)),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);  /* frame pointer candidate */
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter_level,
                               tcg_const_i32(level),
                               tcg_const_i32(s->dflag),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
2514

    
2515
/* Raise exception 'trapno' at guest EIP 'cur_eip': flush the lazy
   condition codes and EIP first so the exception sees consistent
   state, then end the block. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
    s->is_jmp = 3;
}
2523

    
2524
/* an interrupt is different from an exception because of the
   privilege checks; the helper also needs the instruction length
   (next_eip - cur_eip) to compute the return address */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_2(helper_raise_interrupt, 
                       tcg_const_i32(intno), 
                       tcg_const_i32(next_eip - cur_eip));
    s->is_jmp = 3;
}
2537

    
2538
/* Break out to the debugger at guest EIP 'cur_eip', with flags and
   EIP synchronized first. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_0(helper_debug);
    s->is_jmp = 3;
}
2546

    
2547
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        /* the one-instruction interrupt shadow (MOV SS / STI) ends here */
        tcg_gen_helper_0_0(helper_reset_inhibit_irq);
    }
    if (s->singlestep_enabled) {
        tcg_gen_helper_0_0(helper_debug);
    } else if (s->tf) {
        /* TF set: raise the single step trace exception */
        tcg_gen_helper_0_0(helper_single_step);
    } else {
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = 3;
}
2565

    
2566
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        /* chained jump: flush flags first since the target TB expects
           a clean state */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2582

    
2583
/* Unconditional jump to 'eip', using chain slot 0. */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2587

    
2588
/* Load a 64-bit value from guest address A0 into the CPU state at
   'offset'. 'idx' encodes the memory index (see s->mem_index usage). */
static inline void gen_ldq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
}
2594

    
2595
/* Store the 64-bit value at CPU state 'offset' to guest address A0. */
static inline void gen_stq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
}
2601

    
2602
/* Load a 128-bit XMM value from guest address A0 into the CPU state
   at 'offset', as two 64-bit halves. */
static inline void gen_ldo_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
}
2611

    
2612
/* Store the 128-bit XMM value at CPU state 'offset' to guest address
   A0, as two 64-bit halves. */
static inline void gen_sto_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
}
2621

    
2622
/* Copy a 128-bit (octword) value between two CPU state offsets. */
static inline void gen_op_movo(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
}
2629

    
2630
/* Copy a 64-bit value between two CPU state offsets. */
static inline void gen_op_movq(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
2635

    
2636
/* Copy a 32-bit value between two CPU state offsets. */
static inline void gen_op_movl(int d_offset, int s_offset)
{
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
}
2641

    
2642
/* Zero the 64-bit value at CPU state offset 'd_offset'. */
static inline void gen_op_movq_env_0(int d_offset)
{
    tcg_gen_movi_i64(cpu_tmp1_i64, 0);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
2647

    
2648
/* sentinel table entries: SSE_SPECIAL marks opcodes handled by ad-hoc
   code in gen_sse(), SSE_DUMMY marks opcodes needing no helper */
#define SSE_SPECIAL ((void *)1)
#define SSE_DUMMY ((void *)2)

/* entry pair for an MMX op with its SSE (xmm) extension */
#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
/* entry quad for a packed/scalar float op: ps, pd, ss, sd variants */
#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
                     helper_ ## x ## ss, helper_ ## x ## sd, }
2654

    
2655
/* Dispatch table for 0x0F-prefixed SSE/MMX/3DNow! opcodes, indexed by
   the second opcode byte and then by the mandatory prefix:
   [0] = none, [1] = 0x66, [2] = 0xF3, [3] = 0xF2. NULL means illegal. */
static void *sse_op_table1[256][4] = {
    /* 3DNow! extensions */
    [0x0e] = { SSE_DUMMY }, /* femms */
    [0x0f] = { SSE_DUMMY }, /* pf... */
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
    [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { helper_ucomiss, helper_ucomisd },
    [0x2f] = { helper_comiss, helper_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
    [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
    [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
    [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
    [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
               helper_cvtss2sd, helper_cvtsd2ss },
    [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq), /* placeholder; imm8 selects sse_op_table4 row */
    [0xc6] = { helper_shufps, helper_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, helper_punpcklqdq_xmm },
    [0x6d] = { NULL, helper_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0x70] = { helper_pshufw_mmx,
               helper_pshufd_xmm,
               helper_pshufhw_xmm,
               helper_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
    [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2780

    
2781
/* Immediate-form shift group (opcodes 0x71/0x72/0x73): indexed by
   8 * width-group + modrm reg field, then by MMX(0)/XMM(1). */
static void *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, helper_psrldq_xmm },  /* byte shift: XMM only */
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, helper_pslldq_xmm },  /* byte shift: XMM only */
};
2793

    
2794
/* int<->float scalar conversions, 4 entries per group
   (ss, sd, then the 64-bit variants, x86_64 only):
   group 0: cvtsi2*, group 1: cvtt*2si (truncating), group 2: cvt*2si */
static void *sse_op_table3[4 * 3] = {
    helper_cvtsi2ss,
    helper_cvtsi2sd,
    X86_64_ONLY(helper_cvtsq2ss),
    X86_64_ONLY(helper_cvtsq2sd),

    helper_cvttss2si,
    helper_cvttsd2si,
    X86_64_ONLY(helper_cvttss2sq),
    X86_64_ONLY(helper_cvttsd2sq),

    helper_cvtss2si,
    helper_cvtsd2si,
    X86_64_ONLY(helper_cvtss2sq),
    X86_64_ONLY(helper_cvtsd2sq),
};
2810

    
2811
/* CMPPS/CMPPD/CMPSS/CMPSD predicates, indexed by the imm8 comparison
   code (0..7), then by the ps/pd/ss/sd variant. */
static void *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2821

    
2822
/* 3DNow! operations, indexed by the instruction's trailing opcode
   byte. NULL means illegal. */
static void *sse_op_table5[256] = {
    [0x0c] = helper_pi2fw,
    [0x0d] = helper_pi2fd,
    [0x1c] = helper_pf2iw,
    [0x1d] = helper_pf2id,
    [0x8a] = helper_pfnacc,
    [0x8e] = helper_pfpnacc,
    [0x90] = helper_pfcmpge,
    [0x94] = helper_pfmin,
    [0x96] = helper_pfrcp,
    [0x97] = helper_pfrsqrt,
    [0x9a] = helper_pfsub,
    [0x9e] = helper_pfadd,
    [0xa0] = helper_pfcmpgt,
    [0xa4] = helper_pfmax,
    [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = helper_movq, /* pfrsqit1 */
    [0xaa] = helper_pfsubr,
    [0xae] = helper_pfacc,
    [0xb0] = helper_pfcmpeq,
    [0xb4] = helper_pfmul,
    [0xb6] = helper_movq, /* pfrcpit2 */
    [0xb7] = helper_pmulhrw_mmx,
    [0xbb] = helper_pswapd,
    [0xbf] = helper_pavgb_mmx /* pavgusb */
};
2848

    
2849
/* Translate one MMX/SSE/SSE2/SSE3/3DNow! instruction.
 *
 * b:        opcode byte in the 0x0f escape space
 * pc_start: instruction start address, used for exception reporting
 * rex_r:    REX.R extension of the ModRM reg field (64-bit mode)
 *
 * The mandatory prefix (none/0x66/0xf3/0xf2) selects one of four helper
 * variants through sse_op_table1.  SSE_SPECIAL entries are decoded by
 * hand in the big switch below; all remaining opcodes go through the
 * generic two-operand helper path at the end of the function.
 *
 * Fixes vs. the previous revision:
 *  - lddqu (0xf2 0x0f 0xf0) is a load and was wrongly emitted as a store;
 *  - cvtsi2ss/cvtsi2sd truncated a 64-bit (REX.W) source to 32 bits
 *    before calling the cvtsq2ss/cvtsq2sd helpers.
 */
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    void *sse_op2;

    b &= 0xff;
    /* b1 indexes the prefix column: 0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2 */
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    illegal_op:
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        goto illegal_op;
    if (b == 0x0e) {
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
            goto illegal_op;
        /* femms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    if (b == 0x77) {
        /* emms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        tcg_gen_helper_0_0(helper_enter_mmx);
    }

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    if (is_xmm)
        reg |= rex_r;
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        b |= (b1 << 8);
        switch(b) {
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntpd */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x3f0: /* lddqu */
            /* unaligned 128-bit load into xmm reg (was wrongly grouped
               with the movnt* stores) */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx));
                tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
            }
            break;
        case 0x16e: /* movd xmm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
            }
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                /* load zero-extends: upper three dwords are cleared */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                /* load zero-extends the high quadword */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            /* duplicate even dwords into the odd slots */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            /* duplicate odd dwords into the even slots */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
        case 0x7e: /* movd ea, mm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x17e: /* movd ea, xmm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x27e: /* movq xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x7f: /* movq ea, mm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                            offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x011: /* movups */
        case 0x111: /* movupd */
        case 0x029: /* movaps */
        case 0x129: /* movapd */
        case 0x17f: /* movdqa ea, xmm */
        case 0x27f: /* movdqu ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                            offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x211: /* movss ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            }
            break;
        case 0x311: /* movsd ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            }
            break;
        case 0x013: /* movlps */
        case 0x113: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x017: /* movhps */
        case 0x117: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x71: /* shift mm, im */
        case 0x72:
        case 0x73:
        case 0x171: /* shift xmm, im */
        case 0x172:
        case 0x173:
            /* shift count is an immediate byte; it is materialized in
               xmm_t0/mmx_t0 and passed as the second helper operand */
            val = ldub_code(s->pc++);
            if (is_xmm) {
                gen_op_movl_T0_im(val);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
                op1_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                gen_op_movl_T0_im(val);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
                op1_offset = offsetof(CPUX86State,mmx_t0);
            }
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
            if (!sse_op2)
                goto illegal_op;
            if (is_xmm) {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0x050: /* movmskps */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x150: /* movmskpd */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x02a: /* cvtpi2ps */
        case 0x12a: /* cvtpi2pd */
            tcg_gen_helper_0_0(helper_enter_mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b >> 8) {
            case 0x0:
                tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
                break;
            default:
            case 0x1:
                tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22a: /* cvtsi2ss */
        case 0x32a: /* cvtsi2sd */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
            if (ot == OT_LONG) {
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
            } else {
                /* REX.W form (cvtsq2ss/sd): the helper takes the full
                   64-bit source, so it must not be truncated */
                tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_T[0]);
            }
            break;
        case 0x02c: /* cvttps2pi */
        case 0x12c: /* cvttpd2pi */
        case 0x02d: /* cvtps2pi */
        case 0x12d: /* cvtpd2pi */
            tcg_gen_helper_0_0(helper_enter_mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                gen_ldo_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b) {
            case 0x02c:
                tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x12c:
                tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x02d:
                tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x12d:
                tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22c: /* cvttss2si */
        case 0x32c: /* cvttsd2si */
        case 0x22d: /* cvtss2si */
        case 0x32d: /* cvtsd2si */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if ((b >> 8) & 1) {
                    gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
                } else {
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                    tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                }
                op2_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                                    (b & 1) * 4];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            if (ot == OT_LONG) {
                tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            } else {
                tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
            }
            gen_op_mov_reg_T0(ot, reg);
            break;
        case 0xc4: /* pinsrw */
        case 0x1c4:
            s->rip_offset = 1;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
            } else {
                val &= 3;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
            }
            break;
        case 0xc5: /* pextrw */
        case 0x1c5:
            if (mod != 3)
                goto illegal_op;
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
            } else {
                val &= 3;
                rm = (modrm & 7);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x1d6: /* movq ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x2d6: /* movq2dq */
            tcg_gen_helper_0_0(helper_enter_mmx);
            rm = (modrm & 7);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,fpregs[rm].mmx));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x3d6: /* movdq2q */
            tcg_gen_helper_0_0(helper_enter_mmx);
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            break;
        case 0xd7: /* pmovmskb */
        case 0x1d7:
            if (mod != 3)
                goto illegal_op;
            if (b1) {
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
                tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
            } else {
                rm = (modrm & 7);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
            }
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        default:
            goto illegal_op;
        }
    } else {
        /* generic MMX or SSE operation */
        switch(b) {
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
        case 0xc2: /* compare insns */
            /* these opcodes carry an immediate after the memory operand */
            s->rip_offset = 1;
            break;
        default:
            break;
        }
        if (is_xmm) {
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
                                b == 0xc2)) {
                    /* specific case for SSE single instructions */
                    if (b1 == 2) {
                        /* 32 bit access */
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                    } else {
                        /* 64 bit access */
                        gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
                    }
                } else {
                    gen_ldo_env_A0(s->mem_index, op2_offset);
                }
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
        } else {
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
        }
        switch(b) {
        case 0x0f: /* 3DNow! data insns */
            if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
                goto illegal_op;
            val = ldub_code(s->pc++);
            sse_op2 = sse_op_table5[val];
            if (!sse_op2)
                goto illegal_op;
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
            val = ldub_code(s->pc++);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
            break;
        case 0xc2:
            /* compare insns */
            val = ldub_code(s->pc++);
            if (val >= 8)
                goto illegal_op;
            sse_op2 = sse_op_table4[val][b1];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0xf7:
            /* maskmov : we must prepare A0 */
            if (mod != 3)
                goto illegal_op;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(R_EDI);
            } else
#endif
            {
                gen_op_movl_A0_reg(R_EDI);
                if (s->aflag == 0)
                    gen_op_andl_A0_ffff();
            }
            gen_add_A0_ds_seg(s);

            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
            break;
        default:
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        }
        if (b == 0x2e || b == 0x2f) {
            /* (u)comiss/(u)comisd write EFLAGS directly */
            s->cc_op = CC_OP_EFLAGS;
        }
    }
}
3514

    
3515
/* convert one instruction. s->is_jmp is set if the translation must
3516
   be stopped. Return the next pc value */
3517
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3518
{
3519
    int b, prefixes, aflag, dflag;
3520
    int shift, ot;
3521
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3522
    target_ulong next_eip, tval;
3523
    int rex_w, rex_r;
3524

    
3525
    s->pc = pc_start;
3526
    prefixes = 0;
3527
    aflag = s->code32;
3528
    dflag = s->code32;
3529
    s->override = -1;
3530
    rex_w = -1;
3531
    rex_r = 0;
3532
#ifdef TARGET_X86_64
3533
    s->rex_x = 0;
3534
    s->rex_b = 0;
3535
    x86_64_hregs = 0;
3536
#endif
3537
    s->rip_offset = 0; /* for relative ip address */
3538
 next_byte:
3539
    b = ldub_code(s->pc);
3540
    s->pc++;
3541
    /* check prefixes */
3542
#ifdef TARGET_X86_64
3543
    if (CODE64(s)) {
3544
        switch (b) {
3545
        case 0xf3:
3546
            prefixes |= PREFIX_REPZ;
3547
            goto next_byte;
3548
        case 0xf2:
3549
            prefixes |= PREFIX_REPNZ;
3550
            goto next_byte;
3551
        case 0xf0:
3552
            prefixes |= PREFIX_LOCK;
3553
            goto next_byte;
3554
        case 0x2e:
3555
            s->override = R_CS;
3556
            goto next_byte;
3557
        case 0x36:
3558
            s->override = R_SS;
3559
            goto next_byte;
3560
        case 0x3e:
3561
            s->override = R_DS;
3562
            goto next_byte;
3563
        case 0x26:
3564
            s->override = R_ES;
3565
            goto next_byte;
3566
        case 0x64:
3567
            s->override = R_FS;
3568
            goto next_byte;
3569
        case 0x65:
3570
            s->override = R_GS;
3571
            goto next_byte;
3572
        case 0x66:
3573
            prefixes |= PREFIX_DATA;
3574
            goto next_byte;
3575
        case 0x67:
3576
            prefixes |= PREFIX_ADR;
3577
            goto next_byte;
3578
        case 0x40 ... 0x4f:
3579
            /* REX prefix */
3580
            rex_w = (b >> 3) & 1;
3581
            rex_r = (b & 0x4) << 1;
3582
            s->rex_x = (b & 0x2) << 2;
3583
            REX_B(s) = (b & 0x1) << 3;
3584
            x86_64_hregs = 1; /* select uniform byte register addressing */
3585
            goto next_byte;
3586
        }
3587
        if (rex_w == 1) {
3588
            /* 0x66 is ignored if rex.w is set */
3589
            dflag = 2;
3590
        } else {
3591
            if (prefixes & PREFIX_DATA)
3592
                dflag ^= 1;
3593
        }
3594
        if (!(prefixes & PREFIX_ADR))
3595
            aflag = 2;
3596
    } else
3597
#endif
3598
    {
3599
        switch (b) {
3600
        case 0xf3:
3601
            prefixes |= PREFIX_REPZ;
3602
            goto next_byte;
3603
        case 0xf2:
3604
            prefixes |= PREFIX_REPNZ;
3605
            goto next_byte;
3606
        case 0xf0:
3607
            prefixes |= PREFIX_LOCK;
3608
            goto next_byte;
3609
        case 0x2e:
3610
            s->override = R_CS;
3611
            goto next_byte;
3612
        case 0x36:
3613
            s->override = R_SS;
3614
            goto next_byte;
3615
        case 0x3e:
3616
            s->override = R_DS;
3617
            goto next_byte;
3618
        case 0x26:
3619
            s->override = R_ES;
3620
            goto next_byte;
3621
        case 0x64:
3622
            s->override = R_FS;
3623
            goto next_byte;
3624
        case 0x65:
3625
            s->override = R_GS;
3626
            goto next_byte;
3627
        case 0x66:
3628
            prefixes |= PREFIX_DATA;
3629
            goto next_byte;
3630
        case 0x67:
3631
            prefixes |= PREFIX_ADR;
3632
            goto next_byte;
3633
        }
3634
        if (prefixes & PREFIX_DATA)
3635
            dflag ^= 1;
3636
        if (prefixes & PREFIX_ADR)
3637
            aflag ^= 1;
3638
    }
3639

    
3640
    s->prefix = prefixes;
3641
    s->aflag = aflag;
3642
    s->dflag = dflag;
3643

    
3644
    /* lock generation */
3645
    if (prefixes & PREFIX_LOCK)
3646
        tcg_gen_helper_0_0(helper_lock);
3647

    
3648
    /* now check op code */
3649
 reswitch:
3650
    switch(b) {
3651
    case 0x0f:
3652
        /**************************/
3653
        /* extended op code */
3654
        b = ldub_code(s->pc++) | 0x100;
3655
        goto reswitch;
3656

    
3657
        /**************************/
3658
        /* arith & logic */
3659
    case 0x00 ... 0x05:
3660
    case 0x08 ... 0x0d:
3661
    case 0x10 ... 0x15:
3662
    case 0x18 ... 0x1d:
3663
    case 0x20 ... 0x25:
3664
    case 0x28 ... 0x2d:
3665
    case 0x30 ... 0x35:
3666
    case 0x38 ... 0x3d:
3667
        {
3668
            int op, f, val;
3669
            op = (b >> 3) & 7;
3670
            f = (b >> 1) & 3;
3671

    
3672
            if ((b & 1) == 0)
3673
                ot = OT_BYTE;
3674
            else
3675
                ot = dflag + OT_WORD;
3676

    
3677
            switch(f) {
3678
            case 0: /* OP Ev, Gv */
3679
                modrm = ldub_code(s->pc++);
3680
                reg = ((modrm >> 3) & 7) | rex_r;
3681
                mod = (modrm >> 6) & 3;
3682
                rm = (modrm & 7) | REX_B(s);
3683
                if (mod != 3) {
3684
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3685
                    opreg = OR_TMP0;
3686
                } else if (op == OP_XORL && rm == reg) {
3687
                xor_zero:
3688
                    /* xor reg, reg optimisation */
3689
                    gen_op_movl_T0_0();
3690
                    s->cc_op = CC_OP_LOGICB + ot;
3691
                    gen_op_mov_reg_T0(ot, reg);
3692
                    gen_op_update1_cc();
3693
                    break;
3694
                } else {
3695
                    opreg = rm;
3696
                }
3697
                gen_op_mov_TN_reg(ot, 1, reg);
3698
                gen_op(s, op, ot, opreg);
3699
                break;
3700
            case 1: /* OP Gv, Ev */
3701
                modrm = ldub_code(s->pc++);
3702
                mod = (modrm >> 6) & 3;
3703
                reg = ((modrm >> 3) & 7) | rex_r;
3704
                rm = (modrm & 7) | REX_B(s);
3705
                if (mod != 3) {
3706
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3707
                    gen_op_ld_T1_A0(ot + s->mem_index);
3708
                } else if (op == OP_XORL && rm == reg) {
3709
                    goto xor_zero;
3710
                } else {
3711
                    gen_op_mov_TN_reg(ot, 1, rm);
3712
                }
3713
                gen_op(s, op, ot, reg);
3714
                break;
3715
            case 2: /* OP A, Iv */
3716
                val = insn_get(s, ot);
3717
                gen_op_movl_T1_im(val);
3718
                gen_op(s, op, ot, OR_EAX);
3719
                break;
3720
            }
3721
        }
3722
        break;
3723

    
3724
    case 0x80: /* GRP1 */
3725
    case 0x81:
3726
    case 0x82:
3727
    case 0x83:
3728
        {
3729
            int val;
3730

    
3731
            if ((b & 1) == 0)
3732
                ot = OT_BYTE;
3733
            else
3734
                ot = dflag + OT_WORD;
3735

    
3736
            modrm = ldub_code(s->pc++);
3737
            mod = (modrm >> 6) & 3;
3738
            rm = (modrm & 7) | REX_B(s);
3739
            op = (modrm >> 3) & 7;
3740

    
3741
            if (mod != 3) {
3742
                if (b == 0x83)
3743
                    s->rip_offset = 1;
3744
                else
3745
                    s->rip_offset = insn_const_size(ot);
3746
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3747
                opreg = OR_TMP0;
3748
            } else {
3749
                opreg = rm;
3750
            }
3751

    
3752
            switch(b) {
3753
            default:
3754
            case 0x80:
3755
            case 0x81:
3756
            case 0x82:
3757
                val = insn_get(s, ot);
3758
                break;
3759
            case 0x83:
3760
                val = (int8_t)insn_get(s, OT_BYTE);
3761
                break;
3762
            }
3763
            gen_op_movl_T1_im(val);
3764
            gen_op(s, op, ot, opreg);
3765
        }
3766
        break;
3767

    
3768
        /**************************/
3769
        /* inc, dec, and other misc arith */
3770
    case 0x40 ... 0x47: /* inc Gv */
3771
        ot = dflag ? OT_LONG : OT_WORD;
3772
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
3773
        break;
3774
    case 0x48 ... 0x4f: /* dec Gv */
3775
        ot = dflag ? OT_LONG : OT_WORD;
3776
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
3777
        break;
3778
    case 0xf6: /* GRP3 */
3779
    case 0xf7:
3780
        if ((b & 1) == 0)
3781
            ot = OT_BYTE;
3782
        else
3783
            ot = dflag + OT_WORD;
3784

    
3785
        modrm = ldub_code(s->pc++);
3786
        mod = (modrm >> 6) & 3;
3787
        rm = (modrm & 7) | REX_B(s);
3788
        op = (modrm >> 3) & 7;
3789
        if (mod != 3) {
3790
            if (op == 0)
3791
                s->rip_offset = insn_const_size(ot);
3792
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3793
            gen_op_ld_T0_A0(ot + s->mem_index);
3794
        } else {
3795
            gen_op_mov_TN_reg(ot, 0, rm);
3796
        }
3797

    
3798
        switch(op) {
3799
        case 0: /* test */
3800
            val = insn_get(s, ot);
3801
            gen_op_movl_T1_im(val);
3802
            gen_op_testl_T0_T1_cc();
3803
            s->cc_op = CC_OP_LOGICB + ot;
3804
            break;
3805
        case 2: /* not */
3806
            tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
3807
            if (mod != 3) {
3808
                gen_op_st_T0_A0(ot + s->mem_index);
3809
            } else {
3810
                gen_op_mov_reg_T0(ot, rm);
3811
            }
3812
            break;
3813
        case 3: /* neg */
3814
            tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
3815
            if (mod != 3) {
3816
                gen_op_st_T0_A0(ot + s->mem_index);
3817
            } else {
3818
                gen_op_mov_reg_T0(ot, rm);
3819
            }
3820
            gen_op_update_neg_cc();
3821
            s->cc_op = CC_OP_SUBB + ot;
3822
            break;
3823
        case 4: /* mul */
3824
            switch(ot) {
3825
            case OT_BYTE:
3826
                gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
3827
                tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
3828
                tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
3829
                /* XXX: use 32 bit mul which could be faster */
3830
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3831
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
3832
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3833
                tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
3834
                s->cc_op = CC_OP_MULB;
3835
                break;
3836
            case OT_WORD:
3837
                gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
3838
                tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
3839
                tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
3840
                /* XXX: use 32 bit mul which could be faster */
3841
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3842
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
3843
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3844
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
3845
                gen_op_mov_reg_T0(OT_WORD, R_EDX);
3846
                tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3847
                s->cc_op = CC_OP_MULW;
3848
                break;
3849
            default:
3850
            case OT_LONG:
3851
#ifdef TARGET_X86_64
3852
                gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3853
                tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
3854
                tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
3855
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3856
                gen_op_mov_reg_T0(OT_LONG, R_EAX);
3857
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3858
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
3859
                gen_op_mov_reg_T0(OT_LONG, R_EDX);
3860
                tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3861
#else
3862
                {
3863
                    TCGv t0, t1;
3864
                    t0 = tcg_temp_new(TCG_TYPE_I64);
3865
                    t1 = tcg_temp_new(TCG_TYPE_I64);
3866
                    gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3867
                    tcg_gen_extu_i32_i64(t0, cpu_T[0]);
3868
                    tcg_gen_extu_i32_i64(t1, cpu_T[1]);
3869
                    tcg_gen_mul_i64(t0, t0, t1);
3870
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3871
                    gen_op_mov_reg_T0(OT_LONG, R_EAX);
3872
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3873
                    tcg_gen_shri_i64(t0, t0, 32);
3874
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3875
                    gen_op_mov_reg_T0(OT_LONG, R_EDX);
3876
                    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3877
                }
3878
#endif
3879
                s->cc_op = CC_OP_MULL;
3880
                break;
3881
#ifdef TARGET_X86_64
3882
            case OT_QUAD:
3883
                tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
3884
                s->cc_op = CC_OP_MULQ;
3885
                break;
3886
#endif
3887
            }
3888
            break;
3889
        case 5: /* imul */
3890
            switch(ot) {
3891
            case OT_BYTE:
3892
                gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
3893
                tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
3894
                tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
3895
                /* XXX: use 32 bit mul which could be faster */
3896
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3897
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
3898
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3899
                tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
3900
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3901
                s->cc_op = CC_OP_MULB;
3902
                break;
3903
            case OT_WORD:
3904
                gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
3905
                tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
3906
                tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
3907
                /* XXX: use 32 bit mul which could be faster */
3908
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3909
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
3910
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3911
                tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
3912
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3913
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
3914
                gen_op_mov_reg_T0(OT_WORD, R_EDX);
3915
                s->cc_op = CC_OP_MULW;
3916
                break;
3917
            default:
3918
            case OT_LONG:
3919
#ifdef TARGET_X86_64
3920
                gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3921
                tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
3922
                tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
3923
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3924
                gen_op_mov_reg_T0(OT_LONG, R_EAX);
3925
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3926
                tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
3927
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3928
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
3929
                gen_op_mov_reg_T0(OT_LONG, R_EDX);
3930
#else
3931
                {
3932
                    TCGv t0, t1;
3933
                    t0 = tcg_temp_new(TCG_TYPE_I64);
3934
                    t1 = tcg_temp_new(TCG_TYPE_I64);
3935
                    gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3936
                    tcg_gen_ext_i32_i64(t0, cpu_T[0]);
3937
                    tcg_gen_ext_i32_i64(t1, cpu_T[1]);
3938
                    tcg_gen_mul_i64(t0, t0, t1);
3939
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3940
                    gen_op_mov_reg_T0(OT_LONG, R_EAX);
3941
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3942
                    tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
3943
                    tcg_gen_shri_i64(t0, t0, 32);
3944
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3945
                    gen_op_mov_reg_T0(OT_LONG, R_EDX);
3946
                    tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3947
                }
3948
#endif
3949
                s->cc_op = CC_OP_MULL;
3950
                break;
3951
#ifdef TARGET_X86_64
3952
            case OT_QUAD:
3953
                tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
3954
                s->cc_op = CC_OP_MULQ;
3955
                break;
3956
#endif
3957
            }
3958
            break;
3959
        case 6: /* div */
3960
            switch(ot) {
3961
            case OT_BYTE:
3962
                gen_jmp_im(pc_start - s->cs_base);
3963
                tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
3964
                break;
3965
            case OT_WORD:
3966
                gen_jmp_im(pc_start - s->cs_base);
3967
                tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
3968
                break;
3969
            default:
3970
            case OT_LONG:
3971
                gen_jmp_im(pc_start - s->cs_base);
3972
                tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
3973
                break;
3974
#ifdef TARGET_X86_64
3975
            case OT_QUAD:
3976
                gen_jmp_im(pc_start - s->cs_base);
3977
                tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
3978
                break;
3979
#endif
3980
            }
3981
            break;
3982
        case 7: /* idiv */
3983
            switch(ot) {
3984
            case OT_BYTE:
3985
                gen_jmp_im(pc_start - s->cs_base);
3986
                tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
3987
                break;
3988
            case OT_WORD:
3989
                gen_jmp_im(pc_start - s->cs_base);
3990
                tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
3991
                break;
3992
            default:
3993
            case OT_LONG:
3994
                gen_jmp_im(pc_start - s->cs_base);
3995
                tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
3996
                break;
3997
#ifdef TARGET_X86_64
3998
            case OT_QUAD:
3999
                gen_jmp_im(pc_start - s->cs_base);
4000
                tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
4001
                break;
4002
#endif
4003
            }
4004
            break;
4005
        default:
4006
            goto illegal_op;
4007
        }
4008
        break;
4009

    
4010
    case 0xfe: /* GRP4 */
4011
    case 0xff: /* GRP5 */
4012
        if ((b & 1) == 0)
4013
            ot = OT_BYTE;
4014
        else
4015
            ot = dflag + OT_WORD;
4016

    
4017
        modrm = ldub_code(s->pc++);
4018
        mod = (modrm >> 6) & 3;
4019
        rm = (modrm & 7) | REX_B(s);
4020
        op = (modrm >> 3) & 7;
4021
        if (op >= 2 && b == 0xfe) {
4022
            goto illegal_op;
4023
        }
4024
        if (CODE64(s)) {
4025
            if (op == 2 || op == 4) {
4026
                /* operand size for jumps is 64 bit */
4027
                ot = OT_QUAD;
4028
            } else if (op == 3 || op == 5) {
4029
                /* for call calls, the operand is 16 or 32 bit, even
4030
                   in long mode */
4031
                ot = dflag ? OT_LONG : OT_WORD;
4032
            } else if (op == 6) {
4033
                /* default push size is 64 bit */
4034
                ot = dflag ? OT_QUAD : OT_WORD;
4035
            }
4036
        }
4037
        if (mod != 3) {
4038
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4039
            if (op >= 2 && op != 3 && op != 5)
4040
                gen_op_ld_T0_A0(ot + s->mem_index);
4041
        } else {
4042
            gen_op_mov_TN_reg(ot, 0, rm);
4043
        }
4044

    
4045
        switch(op) {
4046
        case 0: /* inc Ev */
4047
            if (mod != 3)
4048
                opreg = OR_TMP0;
4049
            else
4050
                opreg = rm;
4051
            gen_inc(s, ot, opreg, 1);
4052
            break;
4053
        case 1: /* dec Ev */
4054
            if (mod != 3)
4055
                opreg = OR_TMP0;
4056
            else
4057
                opreg = rm;
4058
            gen_inc(s, ot, opreg, -1);
4059
            break;
4060
        case 2: /* call Ev */
4061
            /* XXX: optimize if memory (no 'and' is necessary) */
4062
            if (s->dflag == 0)
4063
                gen_op_andl_T0_ffff();
4064
            next_eip = s->pc - s->cs_base;
4065
            gen_movtl_T1_im(next_eip);
4066
            gen_push_T1(s);
4067
            gen_op_jmp_T0();
4068
            gen_eob(s);
4069
            break;
4070
        case 3: /* lcall Ev */
4071
            gen_op_ld_T1_A0(ot + s->mem_index);
4072
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4073
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4074
        do_lcall:
4075
            if (s->pe && !s->vm86) {
4076
                if (s->cc_op != CC_OP_DYNAMIC)
4077
                    gen_op_set_cc_op(s->cc_op);
4078
                gen_jmp_im(pc_start - s->cs_base);
4079
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4080
                tcg_gen_helper_0_4(helper_lcall_protected,
4081
                                   cpu_tmp2_i32, cpu_T[1],
4082
                                   tcg_const_i32(dflag), 
4083
                                   tcg_const_i32(s->pc - pc_start));
4084
            } else {
4085
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4086
                tcg_gen_helper_0_4(helper_lcall_real,
4087
                                   cpu_tmp2_i32, cpu_T[1],
4088
                                   tcg_const_i32(dflag), 
4089
                                   tcg_const_i32(s->pc - s->cs_base));
4090
            }
4091
            gen_eob(s);
4092
            break;
4093
        case 4: /* jmp Ev */
4094
            if (s->dflag == 0)
4095
                gen_op_andl_T0_ffff();
4096
            gen_op_jmp_T0();
4097
            gen_eob(s);
4098
            break;
4099
        case 5: /* ljmp Ev */
4100
            gen_op_ld_T1_A0(ot + s->mem_index);
4101
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4102
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4103
        do_ljmp:
4104
            if (s->pe && !s->vm86) {
4105
                if (s->cc_op != CC_OP_DYNAMIC)
4106
                    gen_op_set_cc_op(s->cc_op);
4107
                gen_jmp_im(pc_start - s->cs_base);
4108
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4109
                tcg_gen_helper_0_3(helper_ljmp_protected,
4110
                                   cpu_tmp2_i32,
4111
                                   cpu_T[1],
4112
                                   tcg_const_i32(s->pc - pc_start));
4113
            } else {
4114
                gen_op_movl_seg_T0_vm(R_CS);
4115
                gen_op_movl_T0_T1();
4116
                gen_op_jmp_T0();
4117
            }
4118
            gen_eob(s);
4119
            break;
4120
        case 6: /* push Ev */
4121
            gen_push_T0(s);
4122
            break;
4123
        default:
4124
            goto illegal_op;
4125
        }
4126
        break;
4127

    
4128
    case 0x84: /* test Ev, Gv */
4129
    case 0x85:
4130
        if ((b & 1) == 0)
4131
            ot = OT_BYTE;
4132
        else
4133
            ot = dflag + OT_WORD;
4134

    
4135
        modrm = ldub_code(s->pc++);
4136
        mod = (modrm >> 6) & 3;
4137
        rm = (modrm & 7) | REX_B(s);
4138
        reg = ((modrm >> 3) & 7) | rex_r;
4139

    
4140
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4141
        gen_op_mov_TN_reg(ot, 1, reg);
4142
        gen_op_testl_T0_T1_cc();
4143
        s->cc_op = CC_OP_LOGICB + ot;
4144
        break;
4145

    
4146
    case 0xa8: /* test eAX, Iv */
4147
    case 0xa9:
4148
        if ((b & 1) == 0)
4149
            ot = OT_BYTE;
4150
        else
4151
            ot = dflag + OT_WORD;
4152
        val = insn_get(s, ot);
4153

    
4154
        gen_op_mov_TN_reg(ot, 0, OR_EAX);
4155
        gen_op_movl_T1_im(val);
4156
        gen_op_testl_T0_T1_cc();
4157
        s->cc_op = CC_OP_LOGICB + ot;
4158
        break;
4159

    
4160
    case 0x98: /* CWDE/CBW */
4161
#ifdef TARGET_X86_64
4162
        if (dflag == 2) {
4163
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4164
            tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4165
            gen_op_mov_reg_T0(OT_QUAD, R_EAX);
4166
        } else
4167
#endif
4168
        if (dflag == 1) {
4169
            gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4170
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4171
            gen_op_mov_reg_T0(OT_LONG, R_EAX);
4172
        } else {
4173
            gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
4174
            tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4175
            gen_op_mov_reg_T0(OT_WORD, R_EAX);
4176
        }
4177
        break;
4178
    case 0x99: /* CDQ/CWD */
4179
#ifdef TARGET_X86_64
4180
        if (dflag == 2) {
4181
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4182
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
4183
            gen_op_mov_reg_T0(OT_QUAD, R_EDX);
4184
        } else
4185
#endif
4186
        if (dflag == 1) {
4187
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4188
            tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4189
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
4190
            gen_op_mov_reg_T0(OT_LONG, R_EDX);
4191
        } else {
4192
            gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4193
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4194
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
4195
            gen_op_mov_reg_T0(OT_WORD, R_EDX);
4196
        }
4197
        break;
4198
    case 0x1af: /* imul Gv, Ev */
4199
    case 0x69: /* imul Gv, Ev, I */
4200
    case 0x6b:
4201
        ot = dflag + OT_WORD;
4202
        modrm = ldub_code(s->pc++);
4203
        reg = ((modrm >> 3) & 7) | rex_r;
4204
        if (b == 0x69)
4205
            s->rip_offset = insn_const_size(ot);
4206
        else if (b == 0x6b)
4207
            s->rip_offset = 1;
4208
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4209
        if (b == 0x69) {
4210
            val = insn_get(s, ot);
4211
            gen_op_movl_T1_im(val);
4212
        } else if (b == 0x6b) {
4213
            val = (int8_t)insn_get(s, OT_BYTE);
4214
            gen_op_movl_T1_im(val);
4215
        } else {
4216
            gen_op_mov_TN_reg(ot, 1, reg);
4217
        }
4218

    
4219
#ifdef TARGET_X86_64
4220
        if (ot == OT_QUAD) {
4221
            tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
4222
        } else
4223
#endif
4224
        if (ot == OT_LONG) {
4225
#ifdef TARGET_X86_64
4226
                tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4227
                tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4228
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4229
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4230
                tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4231
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4232
#else
4233
                {
4234
                    TCGv t0, t1;
4235
                    t0 = tcg_temp_new(TCG_TYPE_I64);
4236
                    t1 = tcg_temp_new(TCG_TYPE_I64);
4237
                    tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4238
                    tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4239
                    tcg_gen_mul_i64(t0, t0, t1);
4240
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4241
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4242
                    tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4243
                    tcg_gen_shri_i64(t0, t0, 32);
4244
                    tcg_gen_trunc_i64_i32(cpu_T[1], t0);
4245
                    tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
4246
                }
4247
#endif
4248
        } else {
4249
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4250
            tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4251
            /* XXX: use 32 bit mul which could be faster */
4252
            tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4253
            tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4254
            tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4255
            tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4256
        }
4257
        gen_op_mov_reg_T0(ot, reg);
4258
        s->cc_op = CC_OP_MULB + ot;
4259
        break;
4260
    case 0x1c0:
4261
    case 0x1c1: /* xadd Ev, Gv */
4262
        if ((b & 1) == 0)
4263
            ot = OT_BYTE;
4264
        else
4265
            ot = dflag + OT_WORD;
4266
        modrm = ldub_code(s->pc++);
4267
        reg = ((modrm >> 3) & 7) | rex_r;
4268
        mod = (modrm >> 6) & 3;
4269
        if (mod == 3) {
4270
            rm = (modrm & 7) | REX_B(s);
4271
            gen_op_mov_TN_reg(ot, 0, reg);
4272
            gen_op_mov_TN_reg(ot, 1, rm);
4273
            gen_op_addl_T0_T1();
4274
            gen_op_mov_reg_T1(ot, reg);
4275
            gen_op_mov_reg_T0(ot, rm);
4276
        } else {
4277
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4278
            gen_op_mov_TN_reg(ot, 0, reg);
4279
            gen_op_ld_T1_A0(ot + s->mem_index);
4280
            gen_op_addl_T0_T1();
4281
            gen_op_st_T0_A0(ot + s->mem_index);
4282
            gen_op_mov_reg_T1(ot, reg);
4283
        }
4284
        gen_op_update2_cc();
4285
        s->cc_op = CC_OP_ADDB + ot;
4286
        break;
4287
    case 0x1b0:
4288
    case 0x1b1: /* cmpxchg Ev, Gv */
4289
        {
4290
            int label1, label2;
4291

    
4292
            if ((b & 1) == 0)
4293
                ot = OT_BYTE;
4294
            else
4295
                ot = dflag + OT_WORD;
4296
            modrm = ldub_code(s->pc++);
4297
            reg = ((modrm >> 3) & 7) | rex_r;
4298
            mod = (modrm >> 6) & 3;
4299
            gen_op_mov_TN_reg(ot, 1, reg);
4300
            if (mod == 3) {
4301
                rm = (modrm & 7) | REX_B(s);
4302
                gen_op_mov_TN_reg(ot, 0, rm);
4303
            } else {
4304
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4305
                gen_op_ld_T0_A0(ot + s->mem_index);
4306
                rm = 0; /* avoid warning */
4307
            }
4308
            label1 = gen_new_label();
4309
            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_EAX]));
4310
            tcg_gen_sub_tl(cpu_T3, cpu_T3, cpu_T[0]);
4311
            gen_extu(ot, cpu_T3);
4312
            tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);
4313
            if (mod == 3) {
4314
                label2 = gen_new_label();
4315
                gen_op_mov_reg_T0(ot, R_EAX);
4316
                tcg_gen_br(label2);
4317
                gen_set_label(label1);
4318
                gen_op_mov_reg_T1(ot, rm);
4319
                gen_set_label(label2);
4320
            } else {
4321
                tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
4322
                gen_op_mov_reg_T0(ot, R_EAX);
4323
                gen_set_label(label1);
4324
                /* always store */
4325
                gen_op_st_T1_A0(ot + s->mem_index);
4326
            }
4327
            tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4328
            tcg_gen_mov_tl(cpu_cc_dst, cpu_T3);
4329
            s->cc_op = CC_OP_SUBB + ot;
4330
        }
4331
        break;
4332
    case 0x1c7: /* cmpxchg8b */
4333
        modrm = ldub_code(s->pc++);
4334
        mod = (modrm >> 6) & 3;
4335
        if ((mod == 3) || ((modrm & 0x38) != 0x8))
4336
            goto illegal_op;
4337
#ifdef TARGET_X86_64
4338
        if (dflag == 2) {
4339
            if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
4340
                goto illegal_op;
4341
            gen_jmp_im(pc_start - s->cs_base);
4342
            if (s->cc_op != CC_OP_DYNAMIC)
4343
                gen_op_set_cc_op(s->cc_op);
4344
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4345
            tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
4346
        } else
4347
#endif        
4348
        {
4349
            if (!(s->cpuid_features & CPUID_CX8))
4350
                goto illegal_op;
4351
            gen_jmp_im(pc_start - s->cs_base);
4352
            if (s->cc_op != CC_OP_DYNAMIC)
4353
                gen_op_set_cc_op(s->cc_op);
4354
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4355
            tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
4356
        }
4357
        s->cc_op = CC_OP_EFLAGS;
4358
        break;
4359

    
4360
        /**************************/
4361
        /* push/pop */
4362
    case 0x50 ... 0x57: /* push */
4363
        gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
4364
        gen_push_T0(s);
4365
        break;
4366
    case 0x58 ... 0x5f: /* pop */
4367
        if (CODE64(s)) {
4368
            ot = dflag ? OT_QUAD : OT_WORD;
4369
        } else {
4370
            ot = dflag + OT_WORD;
4371
        }
4372
        gen_pop_T0(s);
4373
        /* NOTE: order is important for pop %sp */
4374
        gen_pop_update(s);
4375
        gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
4376
        break;
4377
    case 0x60: /* pusha */
4378
        if (CODE64(s))
4379
            goto illegal_op;
4380
        gen_pusha(s);
4381
        break;
4382
    case 0x61: /* popa */
4383
        if (CODE64(s))
4384
            goto illegal_op;
4385
        gen_popa(s);
4386
        break;
4387
    case 0x68: /* push Iv */
4388
    case 0x6a:
4389
        if (CODE64(s)) {
4390
            ot = dflag ? OT_QUAD : OT_WORD;
4391
        } else {
4392
            ot = dflag + OT_WORD;
4393
        }
4394
        if (b == 0x68)
4395
            val = insn_get(s, ot);
4396
        else
4397
            val = (int8_t)insn_get(s, OT_BYTE);
4398
        gen_op_movl_T0_im(val);
4399
        gen_push_T0(s);
4400
        break;
4401
    case 0x8f: /* pop Ev */
4402
        if (CODE64(s)) {
4403
            ot = dflag ? OT_QUAD : OT_WORD;
4404
        } else {
4405
            ot = dflag + OT_WORD;
4406
        }
4407
        modrm = ldub_code(s->pc++);
4408
        mod = (modrm >> 6) & 3;
4409
        gen_pop_T0(s);
4410
        if (mod == 3) {
4411
            /* NOTE: order is important for pop %sp */
4412
            gen_pop_update(s);
4413
            rm = (modrm & 7) | REX_B(s);
4414
            gen_op_mov_reg_T0(ot, rm);
4415
        } else {
4416
            /* NOTE: order is important too for MMU exceptions */
4417
            s->popl_esp_hack = 1 << ot;
4418
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4419
            s->popl_esp_hack = 0;
4420
            gen_pop_update(s);
4421
        }
4422
        break;
4423
    case 0xc8: /* enter */
4424
        {
4425
            int level;
4426
            val = lduw_code(s->pc);
4427
            s->pc += 2;
4428
            level = ldub_code(s->pc++);
4429
            gen_enter(s, val, level);
4430
        }
4431
        break;
4432
    case 0xc9: /* leave */
4433
        /* XXX: exception not precise (ESP is updated before potential exception) */
4434
        if (CODE64(s)) {
4435
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4436
            gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4437
        } else if (s->ss32) {
4438
            gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4439
            gen_op_mov_reg_T0(OT_LONG, R_ESP);
4440
        } else {
4441
            gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4442
            gen_op_mov_reg_T0(OT_WORD, R_ESP);
4443
        }
4444
        gen_pop_T0(s);
4445
        if (CODE64(s)) {
4446
            ot = dflag ? OT_QUAD : OT_WORD;
4447
        } else {
4448
            ot = dflag + OT_WORD;
4449
        }
4450
        gen_op_mov_reg_T0(ot, R_EBP);
4451
        gen_pop_update(s);
4452
        break;
4453
    case 0x06: /* push es */
4454
    case 0x0e: /* push cs */
4455
    case 0x16: /* push ss */
4456
    case 0x1e: /* push ds */
4457
        if (CODE64(s))
4458
            goto illegal_op;
4459
        gen_op_movl_T0_seg(b >> 3);
4460
        gen_push_T0(s);
4461
        break;
4462
    case 0x1a0: /* push fs */
4463
    case 0x1a8: /* push gs */
4464
        gen_op_movl_T0_seg((b >> 3) & 7);
4465
        gen_push_T0(s);
4466
        break;
4467
    case 0x07: /* pop es */
4468
    case 0x17: /* pop ss */
4469
    case 0x1f: /* pop ds */
4470
        if (CODE64(s))
4471
            goto illegal_op;
4472
        reg = b >> 3;
4473
        gen_pop_T0(s);
4474
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4475
        gen_pop_update(s);
4476
        if (reg == R_SS) {
4477
            /* if reg == SS, inhibit interrupts/trace. */
4478
            /* If several instructions disable interrupts, only the
4479
               _first_ does it */
4480
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4481
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
4482
            s->tf = 0;
4483
        }
4484
        if (s->is_jmp) {
4485
            gen_jmp_im(s->pc - s->cs_base);
4486
            gen_eob(s);
4487
        }
4488
        break;
4489
    case 0x1a1: /* pop fs */
4490
    case 0x1a9: /* pop gs */
4491
        gen_pop_T0(s);
4492
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4493
        gen_pop_update(s);
4494
        if (s->is_jmp) {
4495
            gen_jmp_im(s->pc - s->cs_base);
4496
            gen_eob(s);
4497
        }
4498
        break;
4499

    
4500
        /**************************/
4501
        /* mov */
4502
    case 0x88:
4503
    case 0x89: /* mov Gv, Ev */
4504
        if ((b & 1) == 0)
4505
            ot = OT_BYTE;
4506
        else
4507
            ot = dflag + OT_WORD;
4508
        modrm = ldub_code(s->pc++);
4509
        reg = ((modrm >> 3) & 7) | rex_r;
4510

    
4511
        /* generate a generic store */
4512
        gen_ldst_modrm(s, modrm, ot, reg, 1);
4513
        break;
4514
    case 0xc6:
4515
    case 0xc7: /* mov Ev, Iv */
4516
        if ((b & 1) == 0)
4517
            ot = OT_BYTE;
4518
        else
4519
            ot = dflag + OT_WORD;
4520
        modrm = ldub_code(s->pc++);
4521
        mod = (modrm >> 6) & 3;
4522
        if (mod != 3) {
4523
            s->rip_offset = insn_const_size(ot);
4524
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4525
        }
4526
        val = insn_get(s, ot);
4527
        gen_op_movl_T0_im(val);
4528
        if (mod != 3)
4529
            gen_op_st_T0_A0(ot + s->mem_index);
4530
        else
4531
            gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4532
        break;
4533
    case 0x8a:
4534
    case 0x8b: /* mov Ev, Gv */
4535
        if ((b & 1) == 0)
4536
            ot = OT_BYTE;
4537
        else
4538
            ot = OT_WORD + dflag;
4539
        modrm = ldub_code(s->pc++);
4540
        reg = ((modrm >> 3) & 7) | rex_r;
4541

    
4542
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4543
        gen_op_mov_reg_T0(ot, reg);
4544
        break;
4545
    case 0x8e: /* mov seg, Gv */
4546
        modrm = ldub_code(s->pc++);
4547
        reg = (modrm >> 3) & 7;
4548
        if (reg >= 6 || reg == R_CS)
4549
            goto illegal_op;
4550
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4551
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4552
        if (reg == R_SS) {
4553
            /* if reg == SS, inhibit interrupts/trace */
4554
            /* If several instructions disable interrupts, only the
4555
               _first_ does it */
4556
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4557
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
4558
            s->tf = 0;
4559
        }
4560
        if (s->is_jmp) {
4561
            gen_jmp_im(s->pc - s->cs_base);
4562
            gen_eob(s);
4563
        }
4564
        break;
4565
    case 0x8c: /* mov Gv, seg */
4566
        modrm = ldub_code(s->pc++);
4567
        reg = (modrm >> 3) & 7;
4568
        mod = (modrm >> 6) & 3;
4569
        if (reg >= 6)
4570
            goto illegal_op;
4571
        gen_op_movl_T0_seg(reg);
4572
        if (mod == 3)
4573
            ot = OT_WORD + dflag;
4574
        else
4575
            ot = OT_WORD;
4576
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4577
        break;
4578

    
4579
    case 0x1b6: /* movzbS Gv, Eb */
4580
    case 0x1b7: /* movzwS Gv, Eb */
4581
    case 0x1be: /* movsbS Gv, Eb */
4582
    case 0x1bf: /* movswS Gv, Eb */
4583
        {
4584
            int d_ot;
4585
            /* d_ot is the size of destination */
4586
            d_ot = dflag + OT_WORD;
4587
            /* ot is the size of source */
4588
            ot = (b & 1) + OT_BYTE;
4589
            modrm = ldub_code(s->pc++);
4590
            reg = ((modrm >> 3) & 7) | rex_r;
4591
            mod = (modrm >> 6) & 3;
4592
            rm = (modrm & 7) | REX_B(s);
4593

    
4594
            if (mod == 3) {
4595
                gen_op_mov_TN_reg(ot, 0, rm);
4596
                switch(ot | (b & 8)) {
4597
                case OT_BYTE:
4598
                    tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4599
                    break;
4600
                case OT_BYTE | 8:
4601
                    tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4602
                    break;
4603
                case OT_WORD:
4604
                    tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4605
                    break;
4606
                default:
4607
                case OT_WORD | 8:
4608
                    tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4609
                    break;
4610
                }
4611
                gen_op_mov_reg_T0(d_ot, reg);
4612
            } else {
4613
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4614
                if (b & 8) {
4615
                    gen_op_lds_T0_A0(ot + s->mem_index);
4616
                } else {
4617
                    gen_op_ldu_T0_A0(ot + s->mem_index);
4618
                }
4619
                gen_op_mov_reg_T0(d_ot, reg);
4620
            }
4621
        }
4622
        break;
4623

    
4624
    case 0x8d: /* lea */
4625
        ot = dflag + OT_WORD;
4626
        modrm = ldub_code(s->pc++);
4627
        mod = (modrm >> 6) & 3;
4628
        if (mod == 3)
4629
            goto illegal_op;
4630
        reg = ((modrm >> 3) & 7) | rex_r;
4631
        /* we must ensure that no segment is added */
4632
        s->override = -1;
4633
        val = s->addseg;
4634
        s->addseg = 0;
4635
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4636
        s->addseg = val;
4637
        gen_op_mov_reg_A0(ot - OT_WORD, reg);
4638
        break;
4639

    
4640
    case 0xa0: /* mov EAX, Ov */
4641
    case 0xa1:
4642
    case 0xa2: /* mov Ov, EAX */
4643
    case 0xa3:
4644
        {
4645
            target_ulong offset_addr;
4646

    
4647
            if ((b & 1) == 0)
4648
                ot = OT_BYTE;
4649
            else
4650
                ot = dflag + OT_WORD;
4651
#ifdef TARGET_X86_64
4652
            if (s->aflag == 2) {
4653
                offset_addr = ldq_code(s->pc);
4654
                s->pc += 8;
4655
                gen_op_movq_A0_im(offset_addr);
4656
            } else
4657
#endif
4658
            {
4659
                if (s->aflag) {
4660
                    offset_addr = insn_get(s, OT_LONG);
4661
                } else {
4662
                    offset_addr = insn_get(s, OT_WORD);
4663
                }
4664
                gen_op_movl_A0_im(offset_addr);
4665
            }
4666
            gen_add_A0_ds_seg(s);
4667
            if ((b & 2) == 0) {
4668
                gen_op_ld_T0_A0(ot + s->mem_index);
4669
                gen_op_mov_reg_T0(ot, R_EAX);
4670
            } else {
4671
                gen_op_mov_TN_reg(ot, 0, R_EAX);
4672
                gen_op_st_T0_A0(ot + s->mem_index);
4673
            }
4674
        }
4675
        break;
4676
    case 0xd7: /* xlat */
4677
#ifdef TARGET_X86_64
4678
        if (s->aflag == 2) {
4679
            gen_op_movq_A0_reg(R_EBX);
4680
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4681
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4682
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4683
        } else
4684
#endif
4685
        {
4686
            gen_op_movl_A0_reg(R_EBX);
4687
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4688
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4689
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4690
            if (s->aflag == 0)
4691
                gen_op_andl_A0_ffff();
4692
            else
4693
                tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
4694
        }
4695
        gen_add_A0_ds_seg(s);
4696
        gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4697
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4698
        break;
4699
    case 0xb0 ... 0xb7: /* mov R, Ib */
4700
        val = insn_get(s, OT_BYTE);
4701
        gen_op_movl_T0_im(val);
4702
        gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4703
        break;
4704
    case 0xb8 ... 0xbf: /* mov R, Iv */
4705
#ifdef TARGET_X86_64
4706
        if (dflag == 2) {
4707
            uint64_t tmp;
4708
            /* 64 bit case */
4709
            tmp = ldq_code(s->pc);
4710
            s->pc += 8;
4711
            reg = (b & 7) | REX_B(s);
4712
            gen_movtl_T0_im(tmp);
4713
            gen_op_mov_reg_T0(OT_QUAD, reg);
4714
        } else
4715
#endif
4716
        {
4717
            ot = dflag ? OT_LONG : OT_WORD;
4718
            val = insn_get(s, ot);
4719
            reg = (b & 7) | REX_B(s);
4720
            gen_op_movl_T0_im(val);
4721
            gen_op_mov_reg_T0(ot, reg);
4722
        }
4723
        break;
4724

    
4725
    case 0x91 ... 0x97: /* xchg R, EAX */
4726
        ot = dflag + OT_WORD;
4727
        reg = (b & 7) | REX_B(s);
4728
        rm = R_EAX;
4729
        goto do_xchg_reg;
4730
    case 0x86:
4731
    case 0x87: /* xchg Ev, Gv */
4732
        if ((b & 1) == 0)
4733
            ot = OT_BYTE;
4734
        else
4735
            ot = dflag + OT_WORD;
4736
        modrm = ldub_code(s->pc++);
4737
        reg = ((modrm >> 3) & 7) | rex_r;
4738
        mod = (modrm >> 6) & 3;
4739
        if (mod == 3) {
4740
            rm = (modrm & 7) | REX_B(s);
4741
        do_xchg_reg:
4742
            gen_op_mov_TN_reg(ot, 0, reg);
4743
            gen_op_mov_TN_reg(ot, 1, rm);
4744
            gen_op_mov_reg_T0(ot, rm);
4745
            gen_op_mov_reg_T1(ot, reg);
4746
        } else {
4747
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4748
            gen_op_mov_TN_reg(ot, 0, reg);
4749
            /* for xchg, lock is implicit */
4750
            if (!(prefixes & PREFIX_LOCK))
4751
                tcg_gen_helper_0_0(helper_lock);
4752
            gen_op_ld_T1_A0(ot + s->mem_index);
4753
            gen_op_st_T0_A0(ot + s->mem_index);
4754
            if (!(prefixes & PREFIX_LOCK))
4755
                tcg_gen_helper_0_0(helper_unlock);
4756
            gen_op_mov_reg_T1(ot, reg);
4757
        }
4758
        break;
4759
    case 0xc4: /* les Gv */
4760
        if (CODE64(s))
4761
            goto illegal_op;
4762
        op = R_ES;
4763
        goto do_lxx;
4764
    case 0xc5: /* lds Gv */
4765
        if (CODE64(s))
4766
            goto illegal_op;
4767
        op = R_DS;
4768
        goto do_lxx;
4769
    case 0x1b2: /* lss Gv */
4770
        op = R_SS;
4771
        goto do_lxx;
4772
    case 0x1b4: /* lfs Gv */
4773
        op = R_FS;
4774
        goto do_lxx;
4775
    case 0x1b5: /* lgs Gv */
4776
        op = R_GS;
4777
    do_lxx:
4778
        ot = dflag ? OT_LONG : OT_WORD;
4779
        modrm = ldub_code(s->pc++);
4780
        reg = ((modrm >> 3) & 7) | rex_r;
4781
        mod = (modrm >> 6) & 3;
4782
        if (mod == 3)
4783
            goto illegal_op;
4784
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4785
        gen_op_ld_T1_A0(ot + s->mem_index);
4786
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4787
        /* load the segment first to handle exceptions properly */
4788
        gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4789
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4790
        /* then put the data */
4791
        gen_op_mov_reg_T1(ot, reg);
4792
        if (s->is_jmp) {
4793
            gen_jmp_im(s->pc - s->cs_base);
4794
            gen_eob(s);
4795
        }
4796
        break;
4797

    
4798
        /************************/
4799
        /* shifts */
4800
    case 0xc0:
4801
    case 0xc1:
4802
        /* shift Ev,Ib */
4803
        shift = 2;
4804
    grp2:
4805
        {
4806
            if ((b & 1) == 0)
4807
                ot = OT_BYTE;
4808
            else
4809
                ot = dflag + OT_WORD;
4810

    
4811
            modrm = ldub_code(s->pc++);
4812
            mod = (modrm >> 6) & 3;
4813
            op = (modrm >> 3) & 7;
4814

    
4815
            if (mod != 3) {
4816
                if (shift == 2) {
4817
                    s->rip_offset = 1;
4818
                }
4819
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4820
                opreg = OR_TMP0;
4821
            } else {
4822
                opreg = (modrm & 7) | REX_B(s);
4823
            }
4824

    
4825
            /* simpler op */
4826
            if (shift == 0) {
4827
                gen_shift(s, op, ot, opreg, OR_ECX);
4828
            } else {
4829
                if (shift == 2) {
4830
                    shift = ldub_code(s->pc++);
4831
                }
4832
                gen_shifti(s, op, ot, opreg, shift);
4833
            }
4834
        }
4835
        break;
4836
    case 0xd0:
4837
    case 0xd1:
4838
        /* shift Ev,1 */
4839
        shift = 1;
4840
        goto grp2;
4841
    case 0xd2:
4842
    case 0xd3:
4843
        /* shift Ev,cl */
4844
        shift = 0;
4845
        goto grp2;
4846

    
4847
    case 0x1a4: /* shld imm */
4848
        op = 0;
4849
        shift = 1;
4850
        goto do_shiftd;
4851
    case 0x1a5: /* shld cl */
4852
        op = 0;
4853
        shift = 0;
4854
        goto do_shiftd;
4855
    case 0x1ac: /* shrd imm */
4856
        op = 1;
4857
        shift = 1;
4858
        goto do_shiftd;
4859
    case 0x1ad: /* shrd cl */
4860
        op = 1;
4861
        shift = 0;
4862
    do_shiftd:
4863
        ot = dflag + OT_WORD;
4864
        modrm = ldub_code(s->pc++);
4865
        mod = (modrm >> 6) & 3;
4866
        rm = (modrm & 7) | REX_B(s);
4867
        reg = ((modrm >> 3) & 7) | rex_r;
4868
        if (mod != 3) {
4869
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4870
            opreg = OR_TMP0;
4871
        } else {
4872
            opreg = rm;
4873
        }
4874
        gen_op_mov_TN_reg(ot, 1, reg);
4875

    
4876
        if (shift) {
4877
            val = ldub_code(s->pc++);
4878
            tcg_gen_movi_tl(cpu_T3, val);
4879
        } else {
4880
            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
4881
        }
4882
        gen_shiftd_rm_T1_T3(s, ot, opreg, op);
4883
        break;
4884

    
4885
        /************************/
4886
        /* floats */
4887
    case 0xd8 ... 0xdf:
4888
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4889
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4890
            /* XXX: what to do if illegal op ? */
4891
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4892
            break;
4893
        }
4894
        modrm = ldub_code(s->pc++);
4895
        mod = (modrm >> 6) & 3;
4896
        rm = modrm & 7;
4897
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4898
        if (mod != 3) {
4899
            /* memory op */
4900
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4901
            switch(op) {
4902
            case 0x00 ... 0x07: /* fxxxs */
4903
            case 0x10 ... 0x17: /* fixxxl */
4904
            case 0x20 ... 0x27: /* fxxxl */
4905
            case 0x30 ... 0x37: /* fixxx */
4906
                {
4907
                    int op1;
4908
                    op1 = op & 7;
4909

    
4910
                    switch(op >> 4) {
4911
                    case 0:
4912
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4913
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4914
                        tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
4915
                        break;
4916
                    case 1:
4917
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4918
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4919
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4920
                        break;
4921
                    case 2:
4922
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
4923
                                          (s->mem_index >> 2) - 1);
4924
                        tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
4925
                        break;
4926
                    case 3:
4927
                    default:
4928
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4929
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4930
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4931
                        break;
4932
                    }
4933

    
4934
                    tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
4935
                    if (op1 == 3) {
4936
                        /* fcomp needs pop */
4937
                        tcg_gen_helper_0_0(helper_fpop);
4938
                    }
4939
                }
4940
                break;
4941
            case 0x08: /* flds */
4942
            case 0x0a: /* fsts */
4943
            case 0x0b: /* fstps */
4944
            case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4945
            case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4946
            case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4947
                switch(op & 7) {
4948
                case 0:
4949
                    switch(op >> 4) {
4950
                    case 0:
4951
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4952
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4953
                        tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
4954
                        break;
4955
                    case 1:
4956
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4957
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4958
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
4959
                        break;
4960
                    case 2:
4961
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
4962
                                          (s->mem_index >> 2) - 1);
4963
                        tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
4964
                        break;
4965
                    case 3:
4966
                    default:
4967
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4968
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4969
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
4970
                        break;
4971
                    }
4972
                    break;
4973
                case 1:
4974
                    /* XXX: the corresponding CPUID bit must be tested ! */
4975
                    switch(op >> 4) {
4976
                    case 1:
4977
                        tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
4978
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4979
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4980
                        break;
4981
                    case 2:
4982
                        tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
4983
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
4984
                                          (s->mem_index >> 2) - 1);
4985
                        break;
4986
                    case 3:
4987
                    default:
4988
                        tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
4989
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4990
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
4991
                        break;
4992
                    }
4993
                    tcg_gen_helper_0_0(helper_fpop);
4994
                    break;
4995
                default:
4996
                    switch(op >> 4) {
4997
                    case 0:
4998
                        tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
4999
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5000
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
5001
                        break;
5002
                    case 1:
5003
                        tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
5004
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5005
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
5006
                        break;
5007
                    case 2:
5008
                        tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
5009
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
5010
                                          (s->mem_index >> 2) - 1);
5011
                        break;
5012
                    case 3:
5013
                    default:
5014
                        tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
5015
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5016
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
5017
                        break;
5018
                    }
5019
                    if ((op & 7) == 3)
5020
                        tcg_gen_helper_0_0(helper_fpop);
5021
                    break;
5022
                }
5023
                break;
5024
            case 0x0c: /* fldenv mem */
5025
                if (s->cc_op != CC_OP_DYNAMIC)
5026
                    gen_op_set_cc_op(s->cc_op);
5027
                gen_jmp_im(pc_start - s->cs_base);
5028
                tcg_gen_helper_0_2(helper_fldenv, 
5029
                                   cpu_A0, tcg_const_i32(s->dflag));
5030
                break;
5031
            case 0x0d: /* fldcw mem */
5032
                gen_op_ld_T0_A0(OT_WORD + s->mem_index);
5033
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5034
                tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
5035
                break;
5036
            case 0x0e: /* fnstenv mem */
5037
                if (s->cc_op != CC_OP_DYNAMIC)
5038
                    gen_op_set_cc_op(s->cc_op);
5039
                gen_jmp_im(pc_start - s->cs_base);
5040
                tcg_gen_helper_0_2(helper_fstenv,
5041
                                   cpu_A0, tcg_const_i32(s->dflag));
5042
                break;
5043
            case 0x0f: /* fnstcw mem */
5044
                tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
5045
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5046
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
5047
                break;
5048
            case 0x1d: /* fldt mem */
5049
                if (s->cc_op != CC_OP_DYNAMIC)
5050
                    gen_op_set_cc_op(s->cc_op);
5051
                gen_jmp_im(pc_start - s->cs_base);
5052
                tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
5053
                break;
5054
            case 0x1f: /* fstpt mem */
5055
                if (s->cc_op != CC_OP_DYNAMIC)
5056
                    gen_op_set_cc_op(s->cc_op);
5057
                gen_jmp_im(pc_start - s->cs_base);
5058
                tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
5059
                tcg_gen_helper_0_0(helper_fpop);
5060
                break;
5061
            case 0x2c: /* frstor mem */
5062
                if (s->cc_op != CC_OP_DYNAMIC)
5063
                    gen_op_set_cc_op(s->cc_op);
5064
                gen_jmp_im(pc_start - s->cs_base);
5065
                tcg_gen_helper_0_2(helper_frstor,
5066
                                   cpu_A0, tcg_const_i32(s->dflag));
5067
                break;
5068
            case 0x2e: /* fnsave mem */
5069
                if (s->cc_op != CC_OP_DYNAMIC)
5070
                    gen_op_set_cc_op(s->cc_op);
5071
                gen_jmp_im(pc_start - s->cs_base);
5072
                tcg_gen_helper_0_2(helper_fsave,
5073
                                   cpu_A0, tcg_const_i32(s->dflag));
5074
                break;
5075
            case 0x2f: /* fnstsw mem */
5076
                tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5077
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5078
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
5079
                break;
5080
            case 0x3c: /* fbld */
5081
                if (s->cc_op != CC_OP_DYNAMIC)
5082
                    gen_op_set_cc_op(s->cc_op);
5083
                gen_jmp_im(pc_start - s->cs_base);
5084
                tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
5085
                break;
5086
            case 0x3e: /* fbstp */
5087
                if (s->cc_op != CC_OP_DYNAMIC)
5088
                    gen_op_set_cc_op(s->cc_op);
5089
                gen_jmp_im(pc_start - s->cs_base);
5090
                tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
5091
                tcg_gen_helper_0_0(helper_fpop);
5092
                break;
5093
            case 0x3d: /* fildll */
5094
                tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
5095
                                  (s->mem_index >> 2) - 1);
5096
                tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
5097
                break;
5098
            case 0x3f: /* fistpll */
5099
                tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
5100
                tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
5101
                                  (s->mem_index >> 2) - 1);
5102
                tcg_gen_helper_0_0(helper_fpop);
5103
                break;
5104
            default:
5105
                goto illegal_op;
5106
            }
5107
        } else {
5108
            /* register float ops */
5109
            opreg = rm;
5110

    
5111
            switch(op) {
5112
            case 0x08: /* fld sti */
5113
                tcg_gen_helper_0_0(helper_fpush);
5114
                tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
5115
                break;
5116
            case 0x09: /* fxchg sti */
5117
            case 0x29: /* fxchg4 sti, undocumented op */
5118
            case 0x39: /* fxchg7 sti, undocumented op */
5119
                tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
5120
                break;
5121
            case 0x0a: /* grp d9/2 */
5122
                switch(rm) {
5123
                case 0: /* fnop */
5124
                    /* check exceptions (FreeBSD FPU probe) */
5125
                    if (s->cc_op != CC_OP_DYNAMIC)
5126
                        gen_op_set_cc_op(s->cc_op);
5127
                    gen_jmp_im(pc_start - s->cs_base);
5128
                    tcg_gen_helper_0_0(helper_fwait);
5129
                    break;
5130
                default:
5131
                    goto illegal_op;
5132
                }
5133
                break;
5134
            case 0x0c: /* grp d9/4 */
5135
                switch(rm) {
5136
                case 0: /* fchs */
5137
                    tcg_gen_helper_0_0(helper_fchs_ST0);
5138
                    break;
5139
                case 1: /* fabs */
5140
                    tcg_gen_helper_0_0(helper_fabs_ST0);
5141
                    break;
5142
                case 4: /* ftst */
5143
                    tcg_gen_helper_0_0(helper_fldz_FT0);
5144
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5145
                    break;
5146
                case 5: /* fxam */
5147
                    tcg_gen_helper_0_0(helper_fxam_ST0);
5148
                    break;
5149
                default:
5150
                    goto illegal_op;
5151
                }
5152
                break;
5153
            case 0x0d: /* grp d9/5 */
5154
                {
5155
                    switch(rm) {
5156
                    case 0:
5157
                        tcg_gen_helper_0_0(helper_fpush);
5158
                        tcg_gen_helper_0_0(helper_fld1_ST0);
5159
                        break;
5160
                    case 1:
5161
                        tcg_gen_helper_0_0(helper_fpush);
5162
                        tcg_gen_helper_0_0(helper_fldl2t_ST0);
5163
                        break;
5164
                    case 2:
5165
                        tcg_gen_helper_0_0(helper_fpush);
5166
                        tcg_gen_helper_0_0(helper_fldl2e_ST0);
5167
                        break;
5168
                    case 3:
5169
                        tcg_gen_helper_0_0(helper_fpush);
5170
                        tcg_gen_helper_0_0(helper_fldpi_ST0);
5171
                        break;
5172
                    case 4:
5173
                        tcg_gen_helper_0_0(helper_fpush);
5174
                        tcg_gen_helper_0_0(helper_fldlg2_ST0);
5175
                        break;
5176
                    case 5:
5177
                        tcg_gen_helper_0_0(helper_fpush);
5178
                        tcg_gen_helper_0_0(helper_fldln2_ST0);
5179
                        break;
5180
                    case 6:
5181
                        tcg_gen_helper_0_0(helper_fpush);
5182
                        tcg_gen_helper_0_0(helper_fldz_ST0);
5183
                        break;
5184
                    default:
5185
                        goto illegal_op;
5186
                    }
5187
                }
5188
                break;
5189
            case 0x0e: /* grp d9/6 */
5190
                switch(rm) {
5191
                case 0: /* f2xm1 */
5192
                    tcg_gen_helper_0_0(helper_f2xm1);
5193
                    break;
5194
                case 1: /* fyl2x */
5195
                    tcg_gen_helper_0_0(helper_fyl2x);
5196
                    break;
5197
                case 2: /* fptan */
5198
                    tcg_gen_helper_0_0(helper_fptan);
5199
                    break;
5200
                case 3: /* fpatan */
5201
                    tcg_gen_helper_0_0(helper_fpatan);
5202
                    break;
5203
                case 4: /* fxtract */
5204
                    tcg_gen_helper_0_0(helper_fxtract);
5205
                    break;
5206
                case 5: /* fprem1 */
5207
                    tcg_gen_helper_0_0(helper_fprem1);
5208
                    break;
5209
                case 6: /* fdecstp */
5210
                    tcg_gen_helper_0_0(helper_fdecstp);
5211
                    break;
5212
                default:
5213
                case 7: /* fincstp */
5214
                    tcg_gen_helper_0_0(helper_fincstp);
5215
                    break;
5216
                }
5217
                break;
5218
            case 0x0f: /* grp d9/7 */
5219
                switch(rm) {
5220
                case 0: /* fprem */
5221
                    tcg_gen_helper_0_0(helper_fprem);
5222
                    break;
5223
                case 1: /* fyl2xp1 */
5224
                    tcg_gen_helper_0_0(helper_fyl2xp1);
5225
                    break;
5226
                case 2: /* fsqrt */
5227
                    tcg_gen_helper_0_0(helper_fsqrt);
5228
                    break;
5229
                case 3: /* fsincos */
5230
                    tcg_gen_helper_0_0(helper_fsincos);
5231
                    break;
5232
                case 5: /* fscale */
5233
                    tcg_gen_helper_0_0(helper_fscale);
5234
                    break;
5235
                case 4: /* frndint */
5236
                    tcg_gen_helper_0_0(helper_frndint);
5237
                    break;
5238
                case 6: /* fsin */
5239
                    tcg_gen_helper_0_0(helper_fsin);
5240
                    break;
5241
                default:
5242
                case 7: /* fcos */
5243
                    tcg_gen_helper_0_0(helper_fcos);
5244
                    break;
5245
                }
5246
                break;
5247
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
5248
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
5249
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
5250
                {
5251
                    int op1;
5252

    
5253
                    op1 = op & 7;
5254
                    if (op >= 0x20) {
5255
                        tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
5256
                        if (op >= 0x30)
5257
                            tcg_gen_helper_0_0(helper_fpop);
5258
                    } else {
5259
                        tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5260
                        tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
5261
                    }
5262
                }
5263
                break;
5264
            case 0x02: /* fcom */
5265
            case 0x22: /* fcom2, undocumented op */
5266
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5267
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5268
                break;
5269
            case 0x03: /* fcomp */
5270
            case 0x23: /* fcomp3, undocumented op */
5271
            case 0x32: /* fcomp5, undocumented op */
5272
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5273
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5274
                tcg_gen_helper_0_0(helper_fpop);
5275
                break;
5276
            case 0x15: /* da/5 */
5277
                switch(rm) {
5278
                case 1: /* fucompp */
5279
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5280
                    tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5281
                    tcg_gen_helper_0_0(helper_fpop);
5282
                    tcg_gen_helper_0_0(helper_fpop);
5283
                    break;
5284
                default:
5285
                    goto illegal_op;
5286
                }
5287
                break;
5288
            case 0x1c:
5289
                switch(rm) {
5290
                case 0: /* feni (287 only, just do nop here) */
5291
                    break;
5292
                case 1: /* fdisi (287 only, just do nop here) */
5293
                    break;
5294
                case 2: /* fclex */
5295
                    tcg_gen_helper_0_0(helper_fclex);
5296
                    break;
5297
                case 3: /* fninit */
5298
                    tcg_gen_helper_0_0(helper_fninit);
5299
                    break;
5300
                case 4: /* fsetpm (287 only, just do nop here) */
5301
                    break;
5302
                default:
5303
                    goto illegal_op;
5304
                }
5305
                break;
5306
            case 0x1d: /* fucomi */
5307
                if (s->cc_op != CC_OP_DYNAMIC)
5308
                    gen_op_set_cc_op(s->cc_op);
5309
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5310
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5311
                s->cc_op = CC_OP_EFLAGS;
5312
                break;
5313
            case 0x1e: /* fcomi */
5314
                if (s->cc_op != CC_OP_DYNAMIC)
5315
                    gen_op_set_cc_op(s->cc_op);
5316
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5317
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5318
                s->cc_op = CC_OP_EFLAGS;
5319
                break;
5320
            case 0x28: /* ffree sti */
5321
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5322
                break;
5323
            case 0x2a: /* fst sti */
5324
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5325
                break;
5326
            case 0x2b: /* fstp sti */
5327
            case 0x0b: /* fstp1 sti, undocumented op */
5328
            case 0x3a: /* fstp8 sti, undocumented op */
5329
            case 0x3b: /* fstp9 sti, undocumented op */
5330
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5331
                tcg_gen_helper_0_0(helper_fpop);
5332
                break;
5333
            case 0x2c: /* fucom st(i) */
5334
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5335
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5336
                break;
5337
            case 0x2d: /* fucomp st(i) */
5338
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5339
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5340
                tcg_gen_helper_0_0(helper_fpop);
5341
                break;
5342
            case 0x33: /* de/3 */
5343
                switch(rm) {
5344
                case 1: /* fcompp */
5345
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5346
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5347
                    tcg_gen_helper_0_0(helper_fpop);
5348
                    tcg_gen_helper_0_0(helper_fpop);
5349
                    break;
5350
                default:
5351
                    goto illegal_op;
5352
                }
5353
                break;
5354
            case 0x38: /* ffreep sti, undocumented op */
5355
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5356
                tcg_gen_helper_0_0(helper_fpop);
5357
                break;
5358
            case 0x3c: /* df/4 */
5359
                switch(rm) {
5360
                case 0:
5361
                    tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5362
                    tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5363
                    gen_op_mov_reg_T0(OT_WORD, R_EAX);
5364
                    break;
5365
                default:
5366
                    goto illegal_op;
5367
                }
5368
                break;
5369
            case 0x3d: /* fucomip */
5370
                if (s->cc_op != CC_OP_DYNAMIC)
5371
                    gen_op_set_cc_op(s->cc_op);
5372
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5373
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5374
                tcg_gen_helper_0_0(helper_fpop);
5375
                s->cc_op = CC_OP_EFLAGS;
5376
                break;
5377
            case 0x3e: /* fcomip */
5378
                if (s->cc_op != CC_OP_DYNAMIC)
5379
                    gen_op_set_cc_op(s->cc_op);
5380
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5381
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5382
                tcg_gen_helper_0_0(helper_fpop);
5383
                s->cc_op = CC_OP_EFLAGS;
5384
                break;
5385
            case 0x10 ... 0x13: /* fcmovxx */
5386
            case 0x18 ... 0x1b:
5387
                {
5388
                    int op1, l1;
5389
                    const static uint8_t fcmov_cc[8] = {
5390
                        (JCC_B << 1),
5391
                        (JCC_Z << 1),
5392
                        (JCC_BE << 1),
5393
                        (JCC_P << 1),
5394
                    };
5395
                    op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
5396
                    gen_setcc(s, op1);
5397
                    l1 = gen_new_label();
5398
                    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), l1);
5399
                    tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
5400
                    gen_set_label(l1);
5401
                }
5402
                break;
5403
            default:
5404
                goto illegal_op;
5405
            }
5406
        }
5407
        break;
5408
        /************************/
5409
        /* string ops */
5410

    
5411
    case 0xa4: /* movsS */
5412
    case 0xa5:
5413
        if ((b & 1) == 0)
5414
            ot = OT_BYTE;
5415
        else
5416
            ot = dflag + OT_WORD;
5417

    
5418
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5419
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5420
        } else {
5421
            gen_movs(s, ot);
5422
        }
5423
        break;
5424

    
5425
    case 0xaa: /* stosS */
5426
    case 0xab:
5427
        if ((b & 1) == 0)
5428
            ot = OT_BYTE;
5429
        else
5430
            ot = dflag + OT_WORD;
5431

    
5432
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5433
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5434
        } else {
5435
            gen_stos(s, ot);
5436
        }
5437
        break;
5438
    case 0xac: /* lodsS */
5439
    case 0xad:
5440
        if ((b & 1) == 0)
5441
            ot = OT_BYTE;
5442
        else
5443
            ot = dflag + OT_WORD;
5444
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5445
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5446
        } else {
5447
            gen_lods(s, ot);
5448
        }
5449
        break;
5450
    case 0xae: /* scasS */
5451
    case 0xaf:
5452
        if ((b & 1) == 0)
5453
            ot = OT_BYTE;
5454
        else
5455
            ot = dflag + OT_WORD;
5456
        if (prefixes & PREFIX_REPNZ) {
5457
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5458
        } else if (prefixes & PREFIX_REPZ) {
5459
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5460
        } else {
5461
            gen_scas(s, ot);
5462
            s->cc_op = CC_OP_SUBB + ot;
5463
        }
5464
        break;
5465

    
5466
    case 0xa6: /* cmpsS */
5467
    case 0xa7:
5468
        if ((b & 1) == 0)
5469
            ot = OT_BYTE;
5470
        else
5471
            ot = dflag + OT_WORD;
5472
        if (prefixes & PREFIX_REPNZ) {
5473
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5474
        } else if (prefixes & PREFIX_REPZ) {
5475
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5476
        } else {
5477
            gen_cmps(s, ot);
5478
            s->cc_op = CC_OP_SUBB + ot;
5479
        }
5480
        break;
5481
    case 0x6c: /* insS */
5482
    case 0x6d:
5483
        if ((b & 1) == 0)
5484
            ot = OT_BYTE;
5485
        else
5486
            ot = dflag ? OT_LONG : OT_WORD;
5487
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5488
        gen_op_andl_T0_ffff();
5489
        gen_check_io(s, ot, pc_start - s->cs_base, 
5490
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
5491
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5492
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5493
        } else {
5494
            gen_ins(s, ot);
5495
        }
5496
        break;
5497
    case 0x6e: /* outsS */
5498
    case 0x6f:
5499
        if ((b & 1) == 0)
5500
            ot = OT_BYTE;
5501
        else
5502
            ot = dflag ? OT_LONG : OT_WORD;
5503
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5504
        gen_op_andl_T0_ffff();
5505
        gen_check_io(s, ot, pc_start - s->cs_base,
5506
                     svm_is_rep(prefixes) | 4);
5507
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5508
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5509
        } else {
5510
            gen_outs(s, ot);
5511
        }
5512
        break;
5513

    
5514
        /************************/
5515
        /* port I/O */
5516

    
5517
    case 0xe4:
5518
    case 0xe5:
5519
        if ((b & 1) == 0)
5520
            ot = OT_BYTE;
5521
        else
5522
            ot = dflag ? OT_LONG : OT_WORD;
5523
        val = ldub_code(s->pc++);
5524
        gen_op_movl_T0_im(val);
5525
        gen_check_io(s, ot, pc_start - s->cs_base,
5526
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5527
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5528
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5529
        gen_op_mov_reg_T1(ot, R_EAX);
5530
        break;
5531
    case 0xe6:
5532
    case 0xe7:
5533
        if ((b & 1) == 0)
5534
            ot = OT_BYTE;
5535
        else
5536
            ot = dflag ? OT_LONG : OT_WORD;
5537
        val = ldub_code(s->pc++);
5538
        gen_op_movl_T0_im(val);
5539
        gen_check_io(s, ot, pc_start - s->cs_base,
5540
                     svm_is_rep(prefixes));
5541
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5542

    
5543
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5544
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5545
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5546
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5547
        break;
5548
    case 0xec:
5549
    case 0xed:
5550
        if ((b & 1) == 0)
5551
            ot = OT_BYTE;
5552
        else
5553
            ot = dflag ? OT_LONG : OT_WORD;
5554
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5555
        gen_op_andl_T0_ffff();
5556
        gen_check_io(s, ot, pc_start - s->cs_base,
5557
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5558
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5559
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5560
        gen_op_mov_reg_T1(ot, R_EAX);
5561
        break;
5562
    case 0xee:
5563
    case 0xef:
5564
        if ((b & 1) == 0)
5565
            ot = OT_BYTE;
5566
        else
5567
            ot = dflag ? OT_LONG : OT_WORD;
5568
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5569
        gen_op_andl_T0_ffff();
5570
        gen_check_io(s, ot, pc_start - s->cs_base,
5571
                     svm_is_rep(prefixes));
5572
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5573

    
5574
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5575
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5576
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5577
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5578
        break;
5579

    
5580
        /************************/
5581
        /* control */
5582
    case 0xc2: /* ret im */
5583
        val = ldsw_code(s->pc);
5584
        s->pc += 2;
5585
        gen_pop_T0(s);
5586
        if (CODE64(s) && s->dflag)
5587
            s->dflag = 2;
5588
        gen_stack_update(s, val + (2 << s->dflag));
5589
        if (s->dflag == 0)
5590
            gen_op_andl_T0_ffff();
5591
        gen_op_jmp_T0();
5592
        gen_eob(s);
5593
        break;
5594
    case 0xc3: /* ret */
5595
        gen_pop_T0(s);
5596
        gen_pop_update(s);
5597
        if (s->dflag == 0)
5598
            gen_op_andl_T0_ffff();
5599
        gen_op_jmp_T0();
5600
        gen_eob(s);
5601
        break;
5602
    case 0xca: /* lret im */
5603
        val = ldsw_code(s->pc);
5604
        s->pc += 2;
5605
    do_lret:
5606
        if (s->pe && !s->vm86) {
5607
            if (s->cc_op != CC_OP_DYNAMIC)
5608
                gen_op_set_cc_op(s->cc_op);
5609
            gen_jmp_im(pc_start - s->cs_base);
5610
            tcg_gen_helper_0_2(helper_lret_protected,
5611
                               tcg_const_i32(s->dflag), 
5612
                               tcg_const_i32(val));
5613
        } else {
5614
            gen_stack_A0(s);
5615
            /* pop offset */
5616
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5617
            if (s->dflag == 0)
5618
                gen_op_andl_T0_ffff();
5619
            /* NOTE: keeping EIP updated is not a problem in case of
5620
               exception */
5621
            gen_op_jmp_T0();
5622
            /* pop selector */
5623
            gen_op_addl_A0_im(2 << s->dflag);
5624
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5625
            gen_op_movl_seg_T0_vm(R_CS);
5626
            /* add stack offset */
5627
            gen_stack_update(s, val + (4 << s->dflag));
5628
        }
5629
        gen_eob(s);
5630
        break;
5631
    case 0xcb: /* lret */
5632
        val = 0;
5633
        goto do_lret;
5634
    case 0xcf: /* iret */
5635
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5636
            break;
5637
        if (!s->pe) {
5638
            /* real mode */
5639
            tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5640
            s->cc_op = CC_OP_EFLAGS;
5641
        } else if (s->vm86) {
5642
            if (s->iopl != 3) {
5643
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5644
            } else {
5645
                tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5646
                s->cc_op = CC_OP_EFLAGS;
5647
            }
5648
        } else {
5649
            if (s->cc_op != CC_OP_DYNAMIC)
5650
                gen_op_set_cc_op(s->cc_op);
5651
            gen_jmp_im(pc_start - s->cs_base);
5652
            tcg_gen_helper_0_2(helper_iret_protected,
5653
                               tcg_const_i32(s->dflag), 
5654
                               tcg_const_i32(s->pc - s->cs_base));
5655
            s->cc_op = CC_OP_EFLAGS;
5656
        }
5657
        gen_eob(s);
5658
        break;
5659
    case 0xe8: /* call im */
5660
        {
5661
            if (dflag)
5662
                tval = (int32_t)insn_get(s, OT_LONG);
5663
            else
5664
                tval = (int16_t)insn_get(s, OT_WORD);
5665
            next_eip = s->pc - s->cs_base;
5666
            tval += next_eip;
5667
            if (s->dflag == 0)
5668
                tval &= 0xffff;
5669
            gen_movtl_T0_im(next_eip);
5670
            gen_push_T0(s);
5671
            gen_jmp(s, tval);
5672
        }
5673
        break;
5674
    case 0x9a: /* lcall im */
5675
        {
5676
            unsigned int selector, offset;
5677

    
5678
            if (CODE64(s))
5679
                goto illegal_op;
5680
            ot = dflag ? OT_LONG : OT_WORD;
5681
            offset = insn_get(s, ot);
5682
            selector = insn_get(s, OT_WORD);
5683

    
5684
            gen_op_movl_T0_im(selector);
5685
            gen_op_movl_T1_imu(offset);
5686
        }
5687
        goto do_lcall;
5688
    case 0xe9: /* jmp im */
5689
        if (dflag)
5690
            tval = (int32_t)insn_get(s, OT_LONG);
5691
        else
5692
            tval = (int16_t)insn_get(s, OT_WORD);
5693
        tval += s->pc - s->cs_base;
5694
        if (s->dflag == 0)
5695
            tval &= 0xffff;
5696
        gen_jmp(s, tval);
5697
        break;
5698
    case 0xea: /* ljmp im */
5699
        {
5700
            unsigned int selector, offset;
5701

    
5702
            if (CODE64(s))
5703
                goto illegal_op;
5704
            ot = dflag ? OT_LONG : OT_WORD;
5705
            offset = insn_get(s, ot);
5706
            selector = insn_get(s, OT_WORD);
5707

    
5708
            gen_op_movl_T0_im(selector);
5709
            gen_op_movl_T1_imu(offset);
5710
        }
5711
        goto do_ljmp;
5712
    case 0xeb: /* jmp Jb */
5713
        tval = (int8_t)insn_get(s, OT_BYTE);
5714
        tval += s->pc - s->cs_base;
5715
        if (s->dflag == 0)
5716
            tval &= 0xffff;
5717
        gen_jmp(s, tval);
5718
        break;
5719
    case 0x70 ... 0x7f: /* jcc Jb */
5720
        tval = (int8_t)insn_get(s, OT_BYTE);
5721
        goto do_jcc;
5722
    case 0x180 ... 0x18f: /* jcc Jv */
5723
        if (dflag) {
5724
            tval = (int32_t)insn_get(s, OT_LONG);
5725
        } else {
5726
            tval = (int16_t)insn_get(s, OT_WORD);
5727
        }
5728
    do_jcc:
5729
        next_eip = s->pc - s->cs_base;
5730
        tval += next_eip;
5731
        if (s->dflag == 0)
5732
            tval &= 0xffff;
5733
        gen_jcc(s, b, tval, next_eip);
5734
        break;
5735

    
5736
    case 0x190 ... 0x19f: /* setcc Gv */
5737
        modrm = ldub_code(s->pc++);
5738
        gen_setcc(s, b);
5739
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5740
        break;
5741
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
5742
        {
5743
            int l1;
5744
            ot = dflag + OT_WORD;
5745
            modrm = ldub_code(s->pc++);
5746
            reg = ((modrm >> 3) & 7) | rex_r;
5747
            mod = (modrm >> 6) & 3;
5748
            if (mod != 3) {
5749
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5750
                gen_op_ld_T1_A0(ot + s->mem_index);
5751
            } else {
5752
                rm = (modrm & 7) | REX_B(s);
5753
                gen_op_mov_TN_reg(ot, 1, rm);
5754
            }
5755
            if (s->cc_op != CC_OP_DYNAMIC)
5756
                gen_op_set_cc_op(s->cc_op);
5757
#ifdef TARGET_X86_64
5758
            if (ot == OT_LONG) {
5759
                /* XXX: specific Intel behaviour ? */
5760
                l1 = gen_new_label();
5761
                gen_jcc1(s, s->cc_op, b ^ 1, l1);
5762
                tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
5763
                gen_set_label(l1);
5764
                tcg_gen_movi_tl(cpu_tmp0, 0);
5765
                tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
5766
            } else
5767
#endif
5768
            {
5769
                l1 = gen_new_label();
5770
                gen_jcc1(s, s->cc_op, b ^ 1, l1);
5771
                gen_op_mov_reg_T1(ot, reg);
5772
                gen_set_label(l1);
5773
            }
5774
        }
5775
        break;
5776

    
5777
        /************************/
5778
        /* flags */
5779
    case 0x9c: /* pushf */
5780
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5781
            break;
5782
        if (s->vm86 && s->iopl != 3) {
5783
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5784
        } else {
5785
            if (s->cc_op != CC_OP_DYNAMIC)
5786
                gen_op_set_cc_op(s->cc_op);
5787
            tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
5788
            gen_push_T0(s);
5789
        }
5790
        break;
5791
    case 0x9d: /* popf */
5792
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5793
            break;
5794
        if (s->vm86 && s->iopl != 3) {
5795
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5796
        } else {
5797
            gen_pop_T0(s);
5798
            if (s->cpl == 0) {
5799
                if (s->dflag) {
5800
                    tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5801
                                       tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
5802
                } else {
5803
                    tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5804
                                       tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
5805
                }
5806
            } else {
5807
                if (s->cpl <= s->iopl) {
5808
                    if (s->dflag) {
5809
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5810
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
5811
                    } else {
5812
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5813
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
5814
                    }
5815
                } else {
5816
                    if (s->dflag) {
5817
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5818
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
5819
                    } else {
5820
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
5821
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
5822
                    }
5823
                }
5824
            }
5825
            gen_pop_update(s);
5826
            s->cc_op = CC_OP_EFLAGS;
5827
            /* abort translation because TF flag may change */
5828
            gen_jmp_im(s->pc - s->cs_base);
5829
            gen_eob(s);
5830
        }
5831
        break;
5832
    case 0x9e: /* sahf */
5833
        if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
5834
            goto illegal_op;
5835
        gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5836
        if (s->cc_op != CC_OP_DYNAMIC)
5837
            gen_op_set_cc_op(s->cc_op);
5838
        gen_compute_eflags(cpu_cc_src);
5839
        tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
5840
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
5841
        tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
5842
        s->cc_op = CC_OP_EFLAGS;
5843
        break;
5844
    case 0x9f: /* lahf */
5845
        if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
5846
            goto illegal_op;
5847
        if (s->cc_op != CC_OP_DYNAMIC)
5848
            gen_op_set_cc_op(s->cc_op);
5849
        gen_compute_eflags(cpu_T[0]);
5850
        /* Note: gen_compute_eflags() only gives the condition codes */
5851
        tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
5852
        gen_op_mov_reg_T0(OT_BYTE, R_AH);
5853
        break;
5854
    case 0xf5: /* cmc */
5855
        if (s->cc_op != CC_OP_DYNAMIC)
5856
            gen_op_set_cc_op(s->cc_op);
5857
        gen_compute_eflags(cpu_cc_src);
5858
        tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
5859
        s->cc_op = CC_OP_EFLAGS;
5860
        break;
5861
    case 0xf8: /* clc */
5862
        if (s->cc_op != CC_OP_DYNAMIC)
5863
            gen_op_set_cc_op(s->cc_op);
5864
        gen_compute_eflags(cpu_cc_src);
5865
        tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
5866
        s->cc_op = CC_OP_EFLAGS;
5867
        break;
5868
    case 0xf9: /* stc */
5869
        if (s->cc_op != CC_OP_DYNAMIC)
5870
            gen_op_set_cc_op(s->cc_op);
5871
        gen_compute_eflags(cpu_cc_src);
5872
        tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
5873
        s->cc_op = CC_OP_EFLAGS;
5874
        break;
5875
    case 0xfc: /* cld */
5876
        tcg_gen_movi_i32(cpu_tmp2_i32, 1);
5877
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5878
        break;
5879
    case 0xfd: /* std */
5880
        tcg_gen_movi_i32(cpu_tmp2_i32, -1);
5881
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5882
        break;
5883

    
5884
        /************************/
5885
        /* bit operations */
5886
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
5887
        ot = dflag + OT_WORD;
5888
        modrm = ldub_code(s->pc++);
5889
        op = (modrm >> 3) & 7;
5890
        mod = (modrm >> 6) & 3;
5891
        rm = (modrm & 7) | REX_B(s);
5892
        if (mod != 3) {
5893
            s->rip_offset = 1;
5894
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5895
            gen_op_ld_T0_A0(ot + s->mem_index);
5896
        } else {
5897
            gen_op_mov_TN_reg(ot, 0, rm);
5898
        }
5899
        /* load shift */
5900
        val = ldub_code(s->pc++);
5901
        gen_op_movl_T1_im(val);
5902
        if (op < 4)
5903
            goto illegal_op;
5904
        op -= 4;
5905
        goto bt_op;
5906
    case 0x1a3: /* bt Gv, Ev */
5907
        op = 0;
5908
        goto do_btx;
5909
    case 0x1ab: /* bts */
5910
        op = 1;
5911
        goto do_btx;
5912
    case 0x1b3: /* btr */
5913
        op = 2;
5914
        goto do_btx;
5915
    case 0x1bb: /* btc */
5916
        op = 3;
5917
    do_btx:
5918
        ot = dflag + OT_WORD;
5919
        modrm = ldub_code(s->pc++);
5920
        reg = ((modrm >> 3) & 7) | rex_r;
5921
        mod = (modrm >> 6) & 3;
5922
        rm = (modrm & 7) | REX_B(s);
5923
        gen_op_mov_TN_reg(OT_LONG, 1, reg);
5924
        if (mod != 3) {
5925
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5926
            /* specific case: we need to add a displacement */
5927
            gen_exts(ot, cpu_T[1]);
5928
            tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
5929
            tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
5930
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
5931
            gen_op_ld_T0_A0(ot + s->mem_index);
5932
        } else {
5933
            gen_op_mov_TN_reg(ot, 0, rm);
5934
        }
5935
    bt_op:
5936
        tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
5937
        switch(op) {
5938
        case 0:
5939
            tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
5940
            tcg_gen_movi_tl(cpu_cc_dst, 0);
5941
            break;
5942
        case 1:
5943
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5944
            tcg_gen_movi_tl(cpu_tmp0, 1);
5945
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5946
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5947
            break;
5948
        case 2:
5949
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5950
            tcg_gen_movi_tl(cpu_tmp0, 1);
5951
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5952
            tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
5953
            tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5954
            break;
5955
        default:
5956
        case 3:
5957
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5958
            tcg_gen_movi_tl(cpu_tmp0, 1);
5959
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5960
            tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5961
            break;
5962
        }
5963
        s->cc_op = CC_OP_SARB + ot;
5964
        if (op != 0) {
5965
            if (mod != 3)
5966
                gen_op_st_T0_A0(ot + s->mem_index);
5967
            else
5968
                gen_op_mov_reg_T0(ot, rm);
5969
            tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
5970
            tcg_gen_movi_tl(cpu_cc_dst, 0);
5971
        }
5972
        break;
5973
    case 0x1bc: /* bsf */
5974
    case 0x1bd: /* bsr */
5975
        {
5976
            int label1;
5977
            ot = dflag + OT_WORD;
5978
            modrm = ldub_code(s->pc++);
5979
            reg = ((modrm >> 3) & 7) | rex_r;
5980
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5981
            gen_extu(ot, cpu_T[0]);
5982
            label1 = gen_new_label();
5983
            tcg_gen_movi_tl(cpu_cc_dst, 0);
5984
            tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), label1);
5985
            if (b & 1) {
5986
                tcg_gen_helper_1_1(helper_bsr, cpu_T[0], cpu_T[0]);
5987
            } else {
5988
                tcg_gen_helper_1_1(helper_bsf, cpu_T[0], cpu_T[0]);
5989
            }
5990
            gen_op_mov_reg_T0(ot, reg);
5991
            tcg_gen_movi_tl(cpu_cc_dst, 1);
5992
            gen_set_label(label1);
5993
            tcg_gen_discard_tl(cpu_cc_src);
5994
            s->cc_op = CC_OP_LOGICB + ot;
5995
        }
5996
        break;
5997
        /************************/
5998
        /* bcd */
5999
    case 0x27: /* daa */
6000
        if (CODE64(s))
6001
            goto illegal_op;
6002
        if (s->cc_op != CC_OP_DYNAMIC)
6003
            gen_op_set_cc_op(s->cc_op);
6004
        tcg_gen_helper_0_0(helper_daa);
6005
        s->cc_op = CC_OP_EFLAGS;
6006
        break;
6007
    case 0x2f: /* das */
6008
        if (CODE64(s))
6009
            goto illegal_op;
6010
        if (s->cc_op != CC_OP_DYNAMIC)
6011
            gen_op_set_cc_op(s->cc_op);
6012
        tcg_gen_helper_0_0(helper_das);
6013
        s->cc_op = CC_OP_EFLAGS;
6014
        break;
6015
    case 0x37: /* aaa */
6016
        if (CODE64(s))
6017
            goto illegal_op;
6018
        if (s->cc_op != CC_OP_DYNAMIC)
6019
            gen_op_set_cc_op(s->cc_op);
6020
        tcg_gen_helper_0_0(helper_aaa);
6021
        s->cc_op = CC_OP_EFLAGS;
6022
        break;
6023
    case 0x3f: /* aas */
6024
        if (CODE64(s))
6025
            goto illegal_op;
6026
        if (s->cc_op != CC_OP_DYNAMIC)
6027
            gen_op_set_cc_op(s->cc_op);
6028
        tcg_gen_helper_0_0(helper_aas);
6029
        s->cc_op = CC_OP_EFLAGS;
6030
        break;
6031
    case 0xd4: /* aam */
6032
        if (CODE64(s))
6033
            goto illegal_op;
6034
        val = ldub_code(s->pc++);
6035
        if (val == 0) {
6036
            gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
6037
        } else {
6038
            tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
6039
            s->cc_op = CC_OP_LOGICB;
6040
        }
6041
        break;
6042
    case 0xd5: /* aad */
6043
        if (CODE64(s))
6044
            goto illegal_op;
6045
        val = ldub_code(s->pc++);
6046
        tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
6047
        s->cc_op = CC_OP_LOGICB;
6048
        break;
6049
        /************************/
6050
        /* misc */
6051
    case 0x90: /* nop */
6052
        /* XXX: xchg + rex handling */
6053
        /* XXX: correct lock test for all insn */
6054
        if (prefixes & PREFIX_LOCK)
6055
            goto illegal_op;
6056
        if (prefixes & PREFIX_REPZ) {
6057
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
6058
        }
6059
        break;
6060
    case 0x9b: /* fwait */
6061
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
6062
            (HF_MP_MASK | HF_TS_MASK)) {
6063
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6064
        } else {
6065
            if (s->cc_op != CC_OP_DYNAMIC)
6066
                gen_op_set_cc_op(s->cc_op);
6067
            gen_jmp_im(pc_start - s->cs_base);
6068
            tcg_gen_helper_0_0(helper_fwait);
6069
        }
6070
        break;
6071
    case 0xcc: /* int3 */
6072
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
6073
            break;
6074
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
6075
        break;
6076
    case 0xcd: /* int N */
6077
        val = ldub_code(s->pc++);
6078
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
6079
            break;
6080
        if (s->vm86 && s->iopl != 3) {
6081
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6082
        } else {
6083
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
6084
        }
6085
        break;
6086
    case 0xce: /* into */
6087
        if (CODE64(s))
6088
            goto illegal_op;
6089
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
6090
            break;
6091
        if (s->cc_op != CC_OP_DYNAMIC)
6092
            gen_op_set_cc_op(s->cc_op);
6093
        gen_jmp_im(pc_start - s->cs_base);
6094
        tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
6095
        break;
6096
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
6097
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
6098
            break;
6099
#if 1
6100
        gen_debug(s, pc_start - s->cs_base);
6101
#else
6102
        /* start debug */
6103
        tb_flush(cpu_single_env);
6104
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
6105
#endif
6106
        break;
6107
    case 0xfa: /* cli */
6108
        if (!s->vm86) {
6109
            if (s->cpl <= s->iopl) {
6110
                tcg_gen_helper_0_0(helper_cli);
6111
            } else {
6112
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6113
            }
6114
        } else {
6115
            if (s->iopl == 3) {
6116
                tcg_gen_helper_0_0(helper_cli);
6117
            } else {
6118
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6119
            }
6120
        }
6121
        break;
6122
    case 0xfb: /* sti */
6123
        if (!s->vm86) {
6124
            if (s->cpl <= s->iopl) {
6125
            gen_sti:
6126
                tcg_gen_helper_0_0(helper_sti);
6127
                /* interruptions are enabled only the first insn after sti */
6128
                /* If several instructions disable interrupts, only the
6129
                   _first_ does it */
6130
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
6131
                    tcg_gen_helper_0_0(helper_set_inhibit_irq);
6132
                /* give a chance to handle pending irqs */
6133
                gen_jmp_im(s->pc - s->cs_base);
6134
                gen_eob(s);
6135
            } else {
6136
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6137
            }
6138
        } else {
6139
            if (s->iopl == 3) {
6140
                goto gen_sti;
6141
            } else {
6142
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6143
            }
6144
        }
6145
        break;
6146
    case 0x62: /* bound */
6147
        if (CODE64(s))
6148
            goto illegal_op;
6149
        ot = dflag ? OT_LONG : OT_WORD;
6150
        modrm = ldub_code(s->pc++);
6151
        reg = (modrm >> 3) & 7;
6152
        mod = (modrm >> 6) & 3;
6153
        if (mod == 3)
6154
            goto illegal_op;
6155
        gen_op_mov_TN_reg(ot, 0, reg);
6156
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6157
        gen_jmp_im(pc_start - s->cs_base);
6158
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6159
        if (ot == OT_WORD)
6160
            tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
6161
        else
6162
            tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
6163
        break;
6164
    case 0x1c8 ... 0x1cf: /* bswap reg */
6165
        reg = (b & 7) | REX_B(s);
6166
#ifdef TARGET_X86_64
6167
        if (dflag == 2) {
6168
            gen_op_mov_TN_reg(OT_QUAD, 0, reg);
6169
            tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
6170
            gen_op_mov_reg_T0(OT_QUAD, reg);
6171
        } else
6172
        {
6173
            TCGv tmp0;
6174
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
6175
            
6176
            tmp0 = tcg_temp_new(TCG_TYPE_I32);
6177
            tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
6178
            tcg_gen_bswap_i32(tmp0, tmp0);
6179
            tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
6180
            gen_op_mov_reg_T0(OT_LONG, reg);
6181
        }
6182
#else
6183
        {
6184
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
6185
            tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
6186
            gen_op_mov_reg_T0(OT_LONG, reg);
6187
        }
6188
#endif
6189
        break;
6190
    case 0xd6: /* salc */
6191
        if (CODE64(s))
6192
            goto illegal_op;
6193
        if (s->cc_op != CC_OP_DYNAMIC)
6194
            gen_op_set_cc_op(s->cc_op);
6195
        gen_compute_eflags_c(cpu_T[0]);
6196
        tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
6197
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
6198
        break;
6199
    case 0xe0: /* loopnz */
6200
    case 0xe1: /* loopz */
6201
    case 0xe2: /* loop */
6202
    case 0xe3: /* jecxz */
6203
        {
6204
            int l1, l2, l3;
6205

    
6206
            tval = (int8_t)insn_get(s, OT_BYTE);
6207
            next_eip = s->pc - s->cs_base;
6208
            tval += next_eip;
6209
            if (s->dflag == 0)
6210
                tval &= 0xffff;
6211

    
6212
            l1 = gen_new_label();
6213
            l2 = gen_new_label();
6214
            l3 = gen_new_label();
6215
            b &= 3;
6216
            switch(b) {
6217
            case 0: /* loopnz */
6218
            case 1: /* loopz */
6219
                if (s->cc_op != CC_OP_DYNAMIC)
6220
                    gen_op_set_cc_op(s->cc_op);
6221
                gen_op_add_reg_im(s->aflag, R_ECX, -1);
6222
                gen_op_jz_ecx(s->aflag, l3);
6223
                gen_compute_eflags(cpu_tmp0);
6224
                tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
6225
                if (b == 0) {
6226
                    tcg_gen_brcond_tl(TCG_COND_EQ, 
6227
                                      cpu_tmp0, tcg_const_tl(0), l1);
6228
                } else {
6229
                    tcg_gen_brcond_tl(TCG_COND_NE, 
6230
                                      cpu_tmp0, tcg_const_tl(0), l1);
6231
                }
6232
                break;
6233
            case 2: /* loop */
6234
                gen_op_add_reg_im(s->aflag, R_ECX, -1);
6235
                gen_op_jnz_ecx(s->aflag, l1);
6236
                break;
6237
            default:
6238
            case 3: /* jcxz */
6239
                gen_op_jz_ecx(s->aflag, l1);
6240
                break;
6241
            }
6242

    
6243
            gen_set_label(l3);
6244
            gen_jmp_im(next_eip);
6245
            tcg_gen_br(l2);
6246

    
6247
            gen_set_label(l1);
6248
            gen_jmp_im(tval);
6249
            gen_set_label(l2);
6250
            gen_eob(s);
6251
        }
6252
        break;
6253
    case 0x130: /* wrmsr */
6254
    case 0x132: /* rdmsr */
6255
        if (s->cpl != 0) {
6256
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6257
        } else {
6258
            int retval = 0;
6259
            if (b & 2) {
6260
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
6261
                tcg_gen_helper_0_0(helper_rdmsr);
6262
            } else {
6263
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
6264
                tcg_gen_helper_0_0(helper_wrmsr);
6265
            }
6266
            if(retval)
6267
                gen_eob(s);
6268
        }
6269
        break;
6270
    case 0x131: /* rdtsc */
6271
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
6272
            break;
6273
        gen_jmp_im(pc_start - s->cs_base);
6274
        tcg_gen_helper_0_0(helper_rdtsc);
6275
        break;
6276
    case 0x133: /* rdpmc */
6277
        gen_jmp_im(pc_start - s->cs_base);
6278
        tcg_gen_helper_0_0(helper_rdpmc);
6279
        break;
6280
    case 0x134: /* sysenter */
6281
        if (CODE64(s))
6282
            goto illegal_op;
6283
        if (!s->pe) {
6284
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6285
        } else {
6286
            if (s->cc_op != CC_OP_DYNAMIC) {
6287
                gen_op_set_cc_op(s->cc_op);
6288
                s->cc_op = CC_OP_DYNAMIC;
6289
            }
6290
            gen_jmp_im(pc_start - s->cs_base);
6291
            tcg_gen_helper_0_0(helper_sysenter);
6292
            gen_eob(s);
6293
        }
6294
        break;
6295
    case 0x135: /* sysexit */
6296
        if (CODE64(s))
6297
            goto illegal_op;
6298
        if (!s->pe) {
6299
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6300
        } else {
6301
            if (s->cc_op != CC_OP_DYNAMIC) {
6302
                gen_op_set_cc_op(s->cc_op);
6303
                s->cc_op = CC_OP_DYNAMIC;
6304
            }
6305
            gen_jmp_im(pc_start - s->cs_base);
6306
            tcg_gen_helper_0_0(helper_sysexit);
6307
            gen_eob(s);
6308
        }
6309
        break;
6310
#ifdef TARGET_X86_64
6311
    case 0x105: /* syscall */
6312
        /* XXX: is it usable in real mode ? */
6313
        if (s->cc_op != CC_OP_DYNAMIC) {
6314
            gen_op_set_cc_op(s->cc_op);
6315
            s->cc_op = CC_OP_DYNAMIC;
6316
        }
6317
        gen_jmp_im(pc_start - s->cs_base);
6318
        tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
6319
        gen_eob(s);
6320
        break;
6321
    case 0x107: /* sysret */
6322
        if (!s->pe) {
6323
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6324
        } else {
6325
            if (s->cc_op != CC_OP_DYNAMIC) {
6326
                gen_op_set_cc_op(s->cc_op);
6327
                s->cc_op = CC_OP_DYNAMIC;
6328
            }
6329
            gen_jmp_im(pc_start - s->cs_base);
6330
            tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
6331
            /* condition codes are modified only in long mode */
6332
            if (s->lma)
6333
                s->cc_op = CC_OP_EFLAGS;
6334
            gen_eob(s);
6335
        }
6336
        break;
6337
#endif
6338
    case 0x1a2: /* cpuid */
6339
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
6340
            break;
6341
        tcg_gen_helper_0_0(helper_cpuid);
6342
        break;
6343
    case 0xf4: /* hlt */
6344
        if (s->cpl != 0) {
6345
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6346
        } else {
6347
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
6348
                break;
6349
            if (s->cc_op != CC_OP_DYNAMIC)
6350
                gen_op_set_cc_op(s->cc_op);
6351
            gen_jmp_im(s->pc - s->cs_base);
6352
            tcg_gen_helper_0_0(helper_hlt);
6353
            s->is_jmp = 3;
6354
        }
6355
        break;
6356
    case 0x100:
6357
        modrm = ldub_code(s->pc++);
6358
        mod = (modrm >> 6) & 3;
6359
        op = (modrm >> 3) & 7;
6360
        switch(op) {
6361
        case 0: /* sldt */
6362
            if (!s->pe || s->vm86)
6363
                goto illegal_op;
6364
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
6365
                break;
6366
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
6367
            ot = OT_WORD;
6368
            if (mod == 3)
6369
                ot += s->dflag;
6370
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6371
            break;
6372
        case 2: /* lldt */
6373
            if (!s->pe || s->vm86)
6374
                goto illegal_op;
6375
            if (s->cpl != 0) {
6376
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6377
            } else {
6378
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
6379
                    break;
6380
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6381
                gen_jmp_im(pc_start - s->cs_base);
6382
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6383
                tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
6384
            }
6385
            break;
6386
        case 1: /* str */
6387
            if (!s->pe || s->vm86)
6388
                goto illegal_op;
6389
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
6390
                break;
6391
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
6392
            ot = OT_WORD;
6393
            if (mod == 3)
6394
                ot += s->dflag;
6395
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6396
            break;
6397
        case 3: /* ltr */
6398
            if (!s->pe || s->vm86)
6399
                goto illegal_op;
6400
            if (s->cpl != 0) {
6401
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6402
            } else {
6403
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
6404
                    break;
6405
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6406
                gen_jmp_im(pc_start - s->cs_base);
6407
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6408
                tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
6409
            }
6410
            break;
6411
        case 4: /* verr */
6412
        case 5: /* verw */
6413
            if (!s->pe || s->vm86)
6414
                goto illegal_op;
6415
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6416
            if (s->cc_op != CC_OP_DYNAMIC)
6417
                gen_op_set_cc_op(s->cc_op);
6418
            if (op == 4)
6419
                tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
6420
            else
6421
                tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
6422
            s->cc_op = CC_OP_EFLAGS;
6423
            break;
6424
        default:
6425
            goto illegal_op;
6426
        }
6427
        break;
6428
    case 0x101:
6429
        modrm = ldub_code(s->pc++);
6430
        mod = (modrm >> 6) & 3;
6431
        op = (modrm >> 3) & 7;
6432
        rm = modrm & 7;
6433
        switch(op) {
6434
        case 0: /* sgdt */
6435
            if (mod == 3)
6436
                goto illegal_op;
6437
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
6438
                break;
6439
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6440
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
6441
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
6442
            gen_add_A0_im(s, 2);
6443
            tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
6444
            if (!s->dflag)
6445
                gen_op_andl_T0_im(0xffffff);
6446
            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6447
            break;
6448
        case 1:
6449
            if (mod == 3) {
6450
                switch (rm) {
6451
                case 0: /* monitor */
6452
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6453
                        s->cpl != 0)
6454
                        goto illegal_op;
6455
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
6456
                        break;
6457
                    gen_jmp_im(pc_start - s->cs_base);
6458
#ifdef TARGET_X86_64
6459
                    if (s->aflag == 2) {
6460
                        gen_op_movq_A0_reg(R_EAX);
6461
                    } else
6462
#endif
6463
                    {
6464
                        gen_op_movl_A0_reg(R_EAX);
6465
                        if (s->aflag == 0)
6466
                            gen_op_andl_A0_ffff();
6467
                    }
6468
                    gen_add_A0_ds_seg(s);
6469
                    tcg_gen_helper_0_1(helper_monitor, cpu_A0);
6470
                    break;
6471
                case 1: /* mwait */
6472
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6473
                        s->cpl != 0)
6474
                        goto illegal_op;
6475
                    if (s->cc_op != CC_OP_DYNAMIC) {
6476
                        gen_op_set_cc_op(s->cc_op);
6477
                        s->cc_op = CC_OP_DYNAMIC;
6478
                    }
6479
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
6480
                        break;
6481
                    gen_jmp_im(s->pc - s->cs_base);
6482
                    tcg_gen_helper_0_0(helper_mwait);
6483
                    gen_eob(s);
6484
                    break;
6485
                default:
6486
                    goto illegal_op;
6487
                }
6488
            } else { /* sidt */
6489
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
6490
                    break;
6491
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6492
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
6493
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
6494
                gen_add_A0_im(s, 2);
6495
                tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
6496
                if (!s->dflag)
6497
                    gen_op_andl_T0_im(0xffffff);
6498
                gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6499
            }
6500
            break;
6501
        case 2: /* lgdt */
6502
        case 3: /* lidt */
6503
            if (mod == 3) {
6504
                switch(rm) {
6505
                case 0: /* VMRUN */
6506
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
6507
                        break;
6508
                    if (s->cc_op != CC_OP_DYNAMIC)
6509
                        gen_op_set_cc_op(s->cc_op);
6510
                    gen_jmp_im(s->pc - s->cs_base);
6511
                    tcg_gen_helper_0_0(helper_vmrun);
6512
                    s->cc_op = CC_OP_EFLAGS;
6513
                    gen_eob(s);
6514
                    break;
6515
                case 1: /* VMMCALL */
6516
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
6517
                         break;
6518
                    /* FIXME: cause #UD if hflags & SVM */
6519
                    tcg_gen_helper_0_0(helper_vmmcall);
6520
                    break;
6521
                case 2: /* VMLOAD */
6522
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
6523
                         break;
6524
                    tcg_gen_helper_0_0(helper_vmload);
6525
                    break;
6526
                case 3: /* VMSAVE */
6527
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
6528
                         break;
6529
                    tcg_gen_helper_0_0(helper_vmsave);
6530
                    break;
6531
                case 4: /* STGI */
6532
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
6533
                         break;
6534
                    tcg_gen_helper_0_0(helper_stgi);
6535
                    break;
6536
                case 5: /* CLGI */
6537
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
6538
                         break;
6539
                    tcg_gen_helper_0_0(helper_clgi);
6540
                    break;
6541
                case 6: /* SKINIT */
6542
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
6543
                         break;
6544
                    tcg_gen_helper_0_0(helper_skinit);
6545
                    break;
6546
                case 7: /* INVLPGA */
6547
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
6548
                         break;
6549
                    tcg_gen_helper_0_0(helper_invlpga);
6550
                    break;
6551
                default:
6552
                    goto illegal_op;
6553
                }
6554
            } else if (s->cpl != 0) {
6555
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6556
            } else {
6557
                if (gen_svm_check_intercept(s, pc_start,
6558
                                            op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
6559
                    break;
6560
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6561
                gen_op_ld_T1_A0(OT_WORD + s->mem_index);
6562
                gen_add_A0_im(s, 2);
6563
                gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6564
                if (!s->dflag)
6565
                    gen_op_andl_T0_im(0xffffff);
6566
                if (op == 2) {
6567
                    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
6568
                    tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
6569
                } else {
6570
                    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
6571
                    tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
6572
                }
6573
            }
6574
            break;
6575
        case 4: /* smsw */
6576
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
6577
                break;
6578
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
6579
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6580
            break;
6581
        case 6: /* lmsw */
6582
            if (s->cpl != 0) {
6583
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6584
            } else {
6585
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
6586
                    break;
6587
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6588
                tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
6589
                gen_jmp_im(s->pc - s->cs_base);
6590
                gen_eob(s);
6591
            }
6592
            break;
6593
        case 7: /* invlpg */
6594
            if (s->cpl != 0) {
6595
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6596
            } else {
6597
                if (mod == 3) {
6598
#ifdef TARGET_X86_64
6599
                    if (CODE64(s) && rm == 0) {
6600
                        /* swapgs */
6601
                        tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
6602
                        tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
6603
                        tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
6604
                        tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
6605
                    } else
6606
#endif
6607
                    {
6608
                        goto illegal_op;
6609
                    }
6610
                } else {
6611
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
6612
                        break;
6613
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6614
                    tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
6615
                    gen_jmp_im(s->pc - s->cs_base);
6616
                    gen_eob(s);
6617
                }
6618
            }
6619
            break;
6620
        default:
6621
            goto illegal_op;
6622
        }
6623
        break;
6624
    case 0x108: /* invd */
6625
    case 0x109: /* wbinvd */
6626
        if (s->cpl != 0) {
6627
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6628
        } else {
6629
            if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6630
                break;
6631
            /* nothing to do */
6632
        }
6633
        break;
6634
    case 0x63: /* arpl or movslS (x86_64) */
6635
#ifdef TARGET_X86_64
6636
        if (CODE64(s)) {
6637
            int d_ot;
6638
            /* d_ot is the size of destination */
6639
            d_ot = dflag + OT_WORD;
6640

    
6641
            modrm = ldub_code(s->pc++);
6642
            reg = ((modrm >> 3) & 7) | rex_r;
6643
            mod = (modrm >> 6) & 3;
6644
            rm = (modrm & 7) | REX_B(s);
6645

    
6646
            if (mod == 3) {
6647
                gen_op_mov_TN_reg(OT_LONG, 0, rm);
6648
                /* sign extend */
6649
                if (d_ot == OT_QUAD)
6650
                    tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
6651
                gen_op_mov_reg_T0(d_ot, reg);
6652
            } else {
6653
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6654
                if (d_ot == OT_QUAD) {
6655
                    gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6656
                } else {
6657
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6658
                }
6659
                gen_op_mov_reg_T0(d_ot, reg);
6660
            }
6661
        } else
6662
#endif
6663
        {
6664
            int label1;
6665
            if (!s->pe || s->vm86)
6666
                goto illegal_op;
6667
            ot = OT_WORD;
6668
            modrm = ldub_code(s->pc++);
6669
            reg = (modrm >> 3) & 7;
6670
            mod = (modrm >> 6) & 3;
6671
            rm = modrm & 7;
6672
            if (mod != 3) {
6673
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6674
                gen_op_ld_T0_A0(ot + s->mem_index);
6675
            } else {
6676
                gen_op_mov_TN_reg(ot, 0, rm);
6677
            }
6678
            gen_op_mov_TN_reg(ot, 1, reg);
6679
            tcg_gen_andi_tl(cpu_tmp0, cpu_T[0], 3);
6680
            tcg_gen_andi_tl(cpu_T[1], cpu_T[1], 3);
6681
            tcg_gen_movi_tl(cpu_T3, 0);
6682
            label1 = gen_new_label();
6683
            tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, cpu_T[1], label1);
6684
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], ~3);
6685
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
6686
            tcg_gen_movi_tl(cpu_T3, CC_Z);
6687
            gen_set_label(label1);
6688
            if (mod != 3) {
6689
                gen_op_st_T0_A0(ot + s->mem_index);
6690
            } else {
6691
                gen_op_mov_reg_T0(ot, rm);
6692
            }
6693
            if (s->cc_op != CC_OP_DYNAMIC)
6694
                gen_op_set_cc_op(s->cc_op);
6695
            gen_compute_eflags(cpu_cc_src);
6696
            tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
6697
            tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T3);
6698
            s->cc_op = CC_OP_EFLAGS;
6699
        }
6700
        break;
6701
    case 0x102: /* lar */
6702
    case 0x103: /* lsl */
6703
        {
6704
            int label1;
6705
            if (!s->pe || s->vm86)
6706
                goto illegal_op;
6707
            ot = dflag ? OT_LONG : OT_WORD;
6708
            modrm = ldub_code(s->pc++);
6709
            reg = ((modrm >> 3) & 7) | rex_r;
6710
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6711
            if (s->cc_op != CC_OP_DYNAMIC)
6712
                gen_op_set_cc_op(s->cc_op);
6713
            if (b == 0x102)
6714
                tcg_gen_helper_1_1(helper_lar, cpu_T[0], cpu_T[0]);
6715
            else
6716
                tcg_gen_helper_1_1(helper_lsl, cpu_T[0], cpu_T[0]);
6717
            tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
6718
            label1 = gen_new_label();
6719
            tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), label1);
6720
            gen_op_mov_reg_T0(ot, reg);
6721
            gen_set_label(label1);
6722
            s->cc_op = CC_OP_EFLAGS;
6723
        }
6724
        break;
6725
    case 0x118:
6726
        modrm = ldub_code(s->pc++);
6727
        mod = (modrm >> 6) & 3;
6728
        op = (modrm >> 3) & 7;
6729
        switch(op) {
6730
        case 0: /* prefetchnta */
6731
        case 1: /* prefetchnt0 */
6732
        case 2: /* prefetchnt0 */
6733
        case 3: /* prefetchnt0 */
6734
            if (mod == 3)
6735
                goto illegal_op;
6736
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6737
            /* nothing more to do */
6738
            break;
6739
        default: /* nop (multi byte) */
6740
            gen_nop_modrm(s, modrm);
6741
            break;
6742
        }
6743
        break;
6744
    case 0x119 ... 0x11f: /* nop (multi byte) */
6745
        modrm = ldub_code(s->pc++);
6746
        gen_nop_modrm(s, modrm);
6747
        break;
6748
    case 0x120: /* mov reg, crN */
6749
    case 0x122: /* mov crN, reg */
6750
        if (s->cpl != 0) {
6751
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6752
        } else {
6753
            modrm = ldub_code(s->pc++);
6754
            if ((modrm & 0xc0) != 0xc0)
6755
                goto illegal_op;
6756
            rm = (modrm & 7) | REX_B(s);
6757
            reg = ((modrm >> 3) & 7) | rex_r;
6758
            if (CODE64(s))
6759
                ot = OT_QUAD;
6760
            else
6761
                ot = OT_LONG;
6762
            switch(reg) {
6763
            case 0:
6764
            case 2:
6765
            case 3:
6766
            case 4:
6767
            case 8:
6768
                if (b & 2) {
6769
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6770
                    gen_op_mov_TN_reg(ot, 0, rm);
6771
                    tcg_gen_helper_0_2(helper_movl_crN_T0, 
6772
                                       tcg_const_i32(reg), cpu_T[0]);
6773
                    gen_jmp_im(s->pc - s->cs_base);
6774
                    gen_eob(s);
6775
                } else {
6776
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6777
#if !defined(CONFIG_USER_ONLY)
6778
                    if (reg == 8)
6779
                        tcg_gen_helper_1_0(helper_movtl_T0_cr8, cpu_T[0]);
6780
                    else
6781
#endif
6782
                        tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[reg]));
6783
                    gen_op_mov_reg_T0(ot, rm);
6784
                }
6785
                break;
6786
            default:
6787
                goto illegal_op;
6788
            }
6789
        }
6790
        break;
6791
    case 0x121: /* mov reg, drN */
6792
    case 0x123: /* mov drN, reg */
6793
        if (s->cpl != 0) {
6794
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6795
        } else {
6796
            modrm = ldub_code(s->pc++);
6797
            if ((modrm & 0xc0) != 0xc0)
6798
                goto illegal_op;
6799
            rm = (modrm & 7) | REX_B(s);
6800
            reg = ((modrm >> 3) & 7) | rex_r;
6801
            if (CODE64(s))
6802
                ot = OT_QUAD;
6803
            else
6804
                ot = OT_LONG;
6805
            /* XXX: do it dynamically with CR4.DE bit */
6806
            if (reg == 4 || reg == 5 || reg >= 8)
6807
                goto illegal_op;
6808
            if (b & 2) {
6809
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6810
                gen_op_mov_TN_reg(ot, 0, rm);
6811
                tcg_gen_helper_0_2(helper_movl_drN_T0,
6812
                                   tcg_const_i32(reg), cpu_T[0]);
6813
                gen_jmp_im(s->pc - s->cs_base);
6814
                gen_eob(s);
6815
            } else {
6816
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6817
                tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
6818
                gen_op_mov_reg_T0(ot, rm);
6819
            }
6820
        }
6821
        break;
6822
    case 0x106: /* clts */
6823
        if (s->cpl != 0) {
6824
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6825
        } else {
6826
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6827
            tcg_gen_helper_0_0(helper_clts);
6828
            /* abort block because static cpu state changed */
6829
            gen_jmp_im(s->pc - s->cs_base);
6830
            gen_eob(s);
6831
        }
6832
        break;
6833
    /* MMX/3DNow!/SSE/SSE2/SSE3 support */
6834
    case 0x1c3: /* MOVNTI reg, mem */
6835
        if (!(s->cpuid_features & CPUID_SSE2))
6836
            goto illegal_op;
6837
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6838
        modrm = ldub_code(s->pc++);
6839
        mod = (modrm >> 6) & 3;
6840
        if (mod == 3)
6841
            goto illegal_op;
6842
        reg = ((modrm >> 3) & 7) | rex_r;
6843
        /* generate a generic store */
6844
        gen_ldst_modrm(s, modrm, ot, reg, 1);
6845
        break;
6846
    case 0x1ae:
6847
        modrm = ldub_code(s->pc++);
6848
        mod = (modrm >> 6) & 3;
6849
        op = (modrm >> 3) & 7;
6850
        switch(op) {
6851
        case 0: /* fxsave */
6852
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6853
                (s->flags & HF_EM_MASK))
6854
                goto illegal_op;
6855
            if (s->flags & HF_TS_MASK) {
6856
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6857
                break;
6858
            }
6859
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6860
            if (s->cc_op != CC_OP_DYNAMIC)
6861
                gen_op_set_cc_op(s->cc_op);
6862
            gen_jmp_im(pc_start - s->cs_base);
6863
            tcg_gen_helper_0_2(helper_fxsave, 
6864
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
6865
            break;
6866
        case 1: /* fxrstor */
6867
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6868
                (s->flags & HF_EM_MASK))
6869
                goto illegal_op;
6870
            if (s->flags & HF_TS_MASK) {
6871
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6872
                break;
6873
            }
6874
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6875
            if (s->cc_op != CC_OP_DYNAMIC)
6876
                gen_op_set_cc_op(s->cc_op);
6877
            gen_jmp_im(pc_start - s->cs_base);
6878
            tcg_gen_helper_0_2(helper_fxrstor,
6879
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
6880
            break;
6881
        case 2: /* ldmxcsr */
6882
        case 3: /* stmxcsr */
6883
            if (s->flags & HF_TS_MASK) {
6884
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6885
                break;
6886
            }
6887
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6888
                mod == 3)
6889
                goto illegal_op;
6890
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6891
            if (op == 2) {
6892
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6893
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
6894
            } else {
6895
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
6896
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
6897
            }
6898
            break;
6899
        case 5: /* lfence */
6900
        case 6: /* mfence */
6901
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6902
                goto illegal_op;
6903
            break;
6904
        case 7: /* sfence / clflush */
6905
            if ((modrm & 0xc7) == 0xc0) {
6906
                /* sfence */
6907
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
6908
                if (!(s->cpuid_features & CPUID_SSE))
6909
                    goto illegal_op;
6910
            } else {
6911
                /* clflush */
6912
                if (!(s->cpuid_features & CPUID_CLFLUSH))
6913
                    goto illegal_op;
6914
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6915
            }
6916
            break;
6917
        default:
6918
            goto illegal_op;
6919
        }
6920
        break;
6921
    case 0x10d: /* 3DNow! prefetch(w) */
6922
        modrm = ldub_code(s->pc++);
6923
        mod = (modrm >> 6) & 3;
6924
        if (mod == 3)
6925
            goto illegal_op;
6926
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6927
        /* ignore for now */
6928
        break;
6929
    case 0x1aa: /* rsm */
6930
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
6931
            break;
6932
        if (!(s->flags & HF_SMM_MASK))
6933
            goto illegal_op;
6934
        if (s->cc_op != CC_OP_DYNAMIC) {
6935
            gen_op_set_cc_op(s->cc_op);
6936
            s->cc_op = CC_OP_DYNAMIC;
6937
        }
6938
        gen_jmp_im(s->pc - s->cs_base);
6939
        tcg_gen_helper_0_0(helper_rsm);
6940
        gen_eob(s);
6941
        break;
6942
    case 0x10e ... 0x10f:
6943
        /* 3DNow! instructions, ignore prefixes */
6944
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
6945
    case 0x110 ... 0x117:
6946
    case 0x128 ... 0x12f:
6947
    case 0x150 ... 0x177:
6948
    case 0x17c ... 0x17f:
6949
    case 0x1c2:
6950
    case 0x1c4 ... 0x1c6:
6951
    case 0x1d0 ... 0x1fe:
6952
        gen_sse(s, b, pc_start, rex_r);
6953
        break;
6954
    default:
6955
        goto illegal_op;
6956
    }
6957
    /* lock generation */
6958
    if (s->prefix & PREFIX_LOCK)
6959
        tcg_gen_helper_0_0(helper_unlock);
6960
    return s->pc;
6961
 illegal_op:
6962
    if (s->prefix & PREFIX_LOCK)
6963
        tcg_gen_helper_0_0(helper_unlock);
6964
    /* XXX: ensure that no lock was generated */
6965
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6966
    return s->pc;
6967
}
6968

    
6969
/* Expand a TCG macro opcode back into real ops.  Registered with the TCG
   context by optimize_flags_init(); only does anything when MACRO_TEST
   is compiled in, otherwise it is an empty dispatcher. */
static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
{
#ifdef MACRO_TEST
    if (macro_id == MACRO_TEST) {
        tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
        return;
    }
#endif
}
6979

    
6980
/* One-time initialization of the TCG globals used by the x86 translator:
   the CPU env pointer, the T0/T1/A0 scratch values, the T3 spill slot and
   the lazy condition-code state (cc_op/cc_src/cc_dst). */
void optimize_flags_init(void)
{
    /* CCTable entries must have a power-of-two size — presumably so the
       table can be indexed by shift elsewhere; NOTE(review): confirm. */
#if TCG_TARGET_REG_BITS == 32
    assert(sizeof(CCTable) == (1 << 3));
#else
    assert(sizeof(CCTable) == (1 << 4));
#endif
    tcg_set_macro_func(&tcg_ctx, tcg_macro_func);

    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
#if TARGET_LONG_BITS > HOST_LONG_BITS
    /* target words do not fit in host registers: back T0/T1/A0 with
       memory slots in CPUState instead of fixed host registers */
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL, 
                                  TCG_AREG0, offsetof(CPUState, t0), "T0");
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
                                  TCG_AREG0, offsetof(CPUState, t1), "T1");
    cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
                                TCG_AREG0, offsetof(CPUState, t2), "A0");
#else
    /* target words fit: pin T0/T1/A0 to dedicated host registers */
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
    cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
#endif
    cpu_T3 = tcg_global_mem_new(TCG_TYPE_TL,
                                TCG_AREG0, offsetof(CPUState, t3), "T3");
#if defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS)
    /* XXX: must be suppressed once there are less fixed registers */
    cpu_tmp1_i64 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");
#endif
    cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
                                   TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
    cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
    cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
}
7015

    
7016
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int j, lj, cflags;        /* lj: last gen_opc_* slot written (search_pc) */
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* decode the static CPU state packed into tb->flags into the
       disassembly context */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        /* index into the softmmu access function table; 4 entries per
           privilege level — NOTE(review): confirm table layout */
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
    dc->cpuid_ext3_features = env->cpuid_ext3_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    /* direct block chaining is only possible when no single-step/trap
       flag or pending IRQ inhibition forces us to stop after each TB */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    /* allocate the per-TB TCG temporaries used by the translator */
    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
#if !(defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS))
    cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
#endif
    cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
    cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
    cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;

    /* main translation loop: one guest instruction per iteration */
    for(;;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* record pc/cc_op for this op index so a fault can be
               mapped back to a guest instruction; zero-fill any op
               slots emitted since the previous instruction */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP_OPT) {
            fprintf(logfile, "OP before opt:\n");
            tcg_dump_ops(&tcg_ctx, logfile);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* when searching for a pc the TB already has its final size */
    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
7179

    
7180
/* Translate basic block 'tb' to TCG ops, without recording per-op PC
   mapping information.  Always returns 0. */
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    int ret;

    ret = gen_intermediate_code_internal(env, tb, 0);
    return ret;
}
7184

    
7185
/* Translate basic block 'tb' while also recording the PC mapping data
   (gen_opc_pc/gen_opc_cc_op) needed to restore CPU state on a fault.
   Always returns 0. */
int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    int ret;

    ret = gen_intermediate_code_internal(env, tb, 1);
    return ret;
}
7189

    
7190
/* Restore guest CPU state from the op index 'pc_pos' recorded during a
   search_pc translation: reload EIP from the saved per-op PC table and,
   when the condition codes were not dynamic at that point, the cc_op. */
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    target_ulong new_eip = gen_opc_pc[pc_pos] - tb->cs_base;
    int saved_cc_op = gen_opc_cc_op[pc_pos];

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP) {
        int idx;
        fprintf(logfile, "RESTORE:\n");
        for (idx = 0; idx <= pc_pos; idx++) {
            if (!gen_opc_instr_start[idx])
                continue;
            fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", idx, gen_opc_pc[idx]);
        }
        fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                searched_pc, pc_pos, new_eip,
                (uint32_t)tb->cs_base);
    }
#endif
    env->eip = new_eip;
    /* CC_OP_DYNAMIC means the flags state was already up to date */
    if (saved_cc_op != CC_OP_DYNAMIC)
        env->cc_op = saved_cc_op;
}