Statistics
| Branch: | Revision:

root / target-i386 / translate.c @ 839bca84

History | View | Annotate | Download (227.7 kB)

1
/*
2
 *  i386 translation
3
 *
4
 *  Copyright (c) 2003 Fabrice Bellard
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, write to the Free Software
18
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
 */
20
#include <stdarg.h>
21
#include <stdlib.h>
22
#include <stdio.h>
23
#include <string.h>
24
#include <inttypes.h>
25
#include <signal.h>
26
#include <assert.h>
27

    
28
#include "cpu.h"
29
#include "exec-all.h"
30
#include "disas.h"
31
#include "helper.h"
32
#include "tcg-op.h"
33

    
34
/* instruction prefix flags accumulated while decoding one insn */
#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10

#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
/* 32-bit-only build: the 64-bit variants compile out to NULL/0/no-op */
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif
57

    
58
//#define MACRO_TEST   1

/* global register indexes */
static TCGv cpu_env, cpu_T[2], cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst;
static TCGv cpu_T3;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
static TCGv cpu_tmp5, cpu_tmp6;

#ifdef TARGET_X86_64
/* NOTE(review): presumably set while a REX prefix is active; it changes
   the OT_BYTE register mapping in gen_op_mov_reg_TN/gen_op_mov_TN_reg
   below (regs 4..7 address low bytes instead of AH..BH) — set by the
   decoder, not visible in this chunk. */
static int x86_64_hregs;
#endif
70

    
71
/* Decoder/translator state for the TranslationBlock currently being
   generated; one instance lives for the duration of a single TB. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;   /* PREFIX_* bits seen on the current insn */
    int aflag, dflag; /* address/operand size: 0=16, 1=32 (2=64 on x86_64) */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* REX.X / REX.B prefix bits */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;    /* current privilege level */
    int iopl;   /* I/O privilege level */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;
    int cpuid_ext_features;
    int cpuid_ext2_features;
} DisasContext;
107

    
108
/* forward declarations: defined later in this file */
static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);

/* i386 arith/logic operations (group-1 encoding order) */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops (group-2 encoding order) */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

enum {
    /* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
159

    
160
/* T0 = 0 */
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

/* T0 = val (signed 32-bit immediate) */
static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

/* T0 = val (unsigned 32-bit immediate) */
static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

/* T1 = val (signed 32-bit immediate) */
static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

/* T1 = val (unsigned 32-bit immediate) */
static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

/* A0 = val (unsigned 32-bit immediate) */
static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
/* A0 = val (64-bit immediate) */
static inline void gen_op_movq_A0_im(int64_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

/* T0 = val at full guest word width */
static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

/* T1 = val at full guest word width */
static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
206

    
207
/* T0 &= 0xffff (truncate to 16 bits) */
static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

/* T0 &= val */
static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

/* T0 = T1 */
static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

/* A0 &= 0xffff (16-bit address wrap) */
static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
226

    
227
#ifdef TARGET_X86_64

/* number of operand sizes: byte/word/long/quad */
#define NB_OP_SIZES 4

/* expand one table entry per integer register, in x86 encoding order */
#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,\
  prefix ## R8 ## suffix,\
  prefix ## R9 ## suffix,\
  prefix ## R10 ## suffix,\
  prefix ## R11 ## suffix,\
  prefix ## R12 ## suffix,\
  prefix ## R13 ## suffix,\
  prefix ## R14 ## suffix,\
  prefix ## R15 ## suffix,

#else /* !TARGET_X86_64 */

/* number of operand sizes: byte/word/long */
#define NB_OP_SIZES 3

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,

#endif /* !TARGET_X86_64 */

/* byte offsets of the 8/16/32-bit sub-registers (AL/AH/AX/EAX...) inside
   a target_ulong register slot, accounting for host endianness */
#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
278

    
279
/* Store cpu_T[t_index] into guest register 'reg' with operand size 'ot'.
   For OT_BYTE, regs 4..7 (when no REX/hreg mapping applies) address the
   high byte of regs 0..3 (AH/CH/DH/BH).  On x86_64 a 32-bit store also
   zeroes the high half of the register slot. */
static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            /* AH/CH/DH/BH: high byte of regs[reg - 4] */
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
311

    
312
/* store T0 into guest register 'reg' with operand size 'ot' */
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 0, reg);
}

/* store T1 into guest register 'reg' with operand size 'ot' */
static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 1, reg);
}
321

    
322
/* Store A0 into guest register 'reg'; 'size' is 0=16, 1=32 (, 2=64) bits.
   On x86_64 a 32-bit store also zeroes the high half of the register. */
static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
347

    
348
/* Load guest register 'reg' into cpu_T[t_index].  Only the high-byte
   registers (AH/CH/DH/BH) need a dedicated load; all other cases load
   the full register word and consumers use the low 'ot' bytes. */
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            /* AH/CH/DH/BH: high byte of regs[reg - 4] */
            tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}
364

    
365
/* A0 = low 32 bits of regs[reg] */
static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}

/* A0 += val; 32-bit address computation, so the sum is truncated to
   32 bits on 64-bit targets */
static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
/* A0 += val; full 64-bit address computation */
static inline void gen_op_addq_A0_im(int64_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

/* A0 += val using the current code size (64-bit in long mode, else 32) */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
394

    
395
/* T0 += T1 */
static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

/* eip = T0 */
static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}
404

    
405
/* regs[reg] += val with address size 'size' (0=16, 1=32, 2=64 bits).
   16-bit adds write back only the low word; 32-bit adds zero-extend
   the result on 64-bit targets before the full-width store. */
static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}
430

    
431
/* regs[reg] += T0 with address size 'size' (0=16, 1=32, 2=64 bits);
   same write-back rules as gen_op_add_reg_im */
static inline void gen_op_add_reg_T0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}
456

    
457
/* record the lazy condition-code operation kind in cpu_cc_op */
static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}

/* A0 += regs[reg] << shift; 32-bit addressing, so the result is
   truncated to 32 bits on 64-bit targets */
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0) 
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
472

    
473
/* A0 = low 32 bits of the base of segment 'reg' */
static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

/* A0 += base of segment 'reg'; truncated to 32 bits on 64-bit targets */
static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
486

    
487
#ifdef TARGET_X86_64
488
static inline void gen_op_movq_A0_seg(int reg)
489
{
490
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
491
}
492

    
493
static inline void gen_op_addq_A0_seg(int reg)
494
{
495
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
496
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
497
}
498

    
499
static inline void gen_op_movq_A0_reg(int reg)
500
{
501
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
502
}
503

    
504
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
505
{
506
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
507
    if (shift != 0) 
508
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
509
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
510
}
511
#endif
512

    
513
/* cmov code generators indexed by [operand size - 1][register];
   there is no byte-sized cmov, hence NB_OP_SIZES - 1 rows */
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_cmovw_, _T1_T0)
    },
    [1] = {
        DEF_REGS(gen_op_cmovl_, _T1_T0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_cmovq_, _T1_T0)
    },
#endif
};
526

    
527
/* T0 = sign-extending load at [A0].  'idx' packs the memory index in
   the upper bits ((idx >> 2) - 1) and the size in the low two bits. */
static inline void gen_op_lds_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 2:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
543

    
544
/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
/* T0 = zero-extending load at [A0]; 'idx' encoding as in gen_op_lds_T0_A0 */
static inline void gen_op_ld_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
564

    
565
/* alias of gen_op_ld_T0_A0 (its loads are already zero-extending) */
static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_T0_A0(idx);
}

/* T1 = zero-extending load at [A0]; 'idx' encoding as in gen_op_lds_T0_A0 */
static inline void gen_op_ld_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}
589

    
590
/* store T0 at [A0]; 'idx' encoding as in gen_op_lds_T0_A0 */
static inline void gen_op_st_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

/* store T1 at [A0]; 'idx' encoding as in gen_op_lds_T0_A0 */
static inline void gen_op_st_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}
629

    
630
/* set the architectural eip to the known constant 'pc' */
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
635

    
636
/* A0 = segment base (honoring any segment override) + (R)ESI, at the
   current address size; used as the source address of string ops */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64 bit address: segment base only applies when overridden */
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 bit address: the segment base is always added */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}
670

    
671
/* A0 = ES base + (R)EDI at the current address size; used as the
   destination address of string ops (ES cannot be overridden) */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        /* 16 bit address: the ES base is always added */
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
691

    
692
static inline void gen_op_movl_T0_Dshift(int ot) 
693
{
694
    tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
695
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
696
};
697

    
698
/* Zero-extend 'reg' in place from operand size 'ot' to full register
   width.  OT_QUAD (and any other value) is already full width and
   needs no extension. */
static void gen_extu(int ot, TCGv reg)
{
    if (ot == OT_BYTE) {
        tcg_gen_ext8u_tl(reg, reg);
    } else if (ot == OT_WORD) {
        tcg_gen_ext16u_tl(reg, reg);
    } else if (ot == OT_LONG) {
        tcg_gen_ext32u_tl(reg, reg);
    }
}
714

    
715
/* Sign-extend 'reg' in place from operand size 'ot' to full register
   width.  OT_QUAD (and any other value) is already full width and
   needs no extension. */
static void gen_exts(int ot, TCGv reg)
{
    if (ot == OT_BYTE) {
        tcg_gen_ext8s_tl(reg, reg);
    } else if (ot == OT_WORD) {
        tcg_gen_ext16s_tl(reg, reg);
    } else if (ot == OT_LONG) {
        tcg_gen_ext32s_tl(reg, reg);
    }
}
731

    
732
/* branch to label1 if (R)ECX, truncated to the address size, is != 0;
   'size' is the aflag value, so size + 1 is the OT_* width */
static inline void gen_op_jnz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcond_tl(TCG_COND_NE, cpu_tmp0, tcg_const_tl(0), label1);
}

/* branch to label1 if (R)ECX, truncated to the address size, is == 0 */
static inline void gen_op_jz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), label1);
}
745

    
746
/* loop-exit branch generators for REPZ/REPNZ string ops, indexed by
   [nz][operand size]: branch to the given label when the last SUB
   result is non-zero ([0]) or zero ([1]) */
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};
760

    
761
/* port-input helpers indexed by operand size (byte/word/long) */
static void *helper_in_func[3] = {
    helper_inb,
    helper_inw,
    helper_inl,
};

/* port-output helpers indexed by operand size (byte/word/long) */
static void *helper_out_func[3] = {
    helper_outb,
    helper_outw,
    helper_outl,
};

/* I/O permission check helpers indexed by operand size */
static void *gen_check_io_func[3] = {
    helper_check_iob,
    helper_check_iow,
    helper_check_iol,
};
778

    
779
/* Emit the permission checks required before an I/O instruction whose
   port is in T0.  In protected mode with CPL > IOPL (or in vm86 mode)
   the check helper is called (presumably consulting the TSS I/O
   permission bitmap — see helper_check_io*); cc_op and eip are synced
   first since the helper may raise a fault.  If SVM IOIO interception
   is enabled, the SVM check helper is called as well. */
static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
                           cpu_tmp2_i32);
    }
    if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
        /* sync state once, even if already done above */
        if (!state_saved) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(cur_eip);
            state_saved = 1;
        }
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           cpu_tmp2_i32,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));
    }
}
811

    
812
/* one MOVS iteration: copy an 'ot'-sized item from [seg:(R)ESI] to
   [ES:(R)EDI], then advance both index registers by the DF step */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
822

    
823
/* flush the static cc_op into cpu_cc_op and mark it dynamic, so code
   emitted afterwards computes flags from the runtime cc_op value */
static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}

/* lazy flags for one-operand ops: only the result (T0) is needed */
static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

/* lazy flags for two-operand ops: record second operand and result */
static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

/* lazy flags for CMP: record T1 and the subtraction result */
static inline void gen_op_cmpl_T0_T1_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

/* lazy flags for TEST: record the AND result */
static inline void gen_op_testl_T0_T1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

/* lazy flags for NEG: cc_src = -T0 (original operand), cc_dst = result */
static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
860

    
861
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
/* Emit the ECX == 0 fast exit for a REP-prefixed string op: when ECX
   is zero, control falls to l2 which chains to next_eip.  Returns l2
   so callers can also branch there to leave the loop. */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx(s->aflag, l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
875

    
876
/* one STOS iteration: store the low 'ot' bytes of EAX at [ES:(R)EDI],
   then step EDI by the DF delta */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

/* one LODS iteration: load an 'ot'-sized item from [seg:(R)ESI] into
   EAX, then step ESI */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}

/* one SCAS iteration: compare EAX with [ES:(R)EDI], set lazy flags,
   then step EDI */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

/* one CMPS iteration: compare [seg:(R)ESI] with [ES:(R)EDI], set lazy
   flags, then step both index registers */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
915

    
916
/* one INS iteration: read an 'ot'-sized value from port DX (masked to
   16 bits) and store it at [ES:(R)EDI], then step EDI */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

/* one OUTS iteration: load an 'ot'-sized value from [seg:(R)ESI] and
   write it to port DX (masked to 16 bits), then step ESI */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}
946

    
947
/* same method as Valgrind : we generate jumps to current or next
   instruction */
/* REP expansion: emit one iteration of the string op and the ECX
   decrement, then loop by chaining back to the current insn; the
   ECX == 0 test from gen_jz_ecx_string falls through to the next insn.
   GEN_REPZ2 additionally exits via the ZF test for REPZ/REPNZ
   (SCAS/CMPS). */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);\
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}

/* instantiate the REP-prefixed variants of each string op */
GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
990

    
991
/* condition codes for Jcc/SETcc, in x86 encoding order */
enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};

/* fast conditional-branch generators specialized on the width of the
   subtraction kept in the lazy cc state, indexed by [size][condition];
   NULL entries (and BUGGY_64 ones on x86_64) fall back to the slow
   flag-computing path */
static GenOpFunc1 *gen_jcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_jb_subb,
        gen_op_jz_subb,
        gen_op_jbe_subb,
        gen_op_js_subb,
        NULL,
        gen_op_jl_subb,
        gen_op_jle_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_jb_subw,
        gen_op_jz_subw,
        gen_op_jbe_subw,
        gen_op_js_subw,
        NULL,
        gen_op_jl_subw,
        gen_op_jle_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_jb_subl,
        gen_op_jz_subl,
        gen_op_jbe_subl,
        gen_op_js_subl,
        NULL,
        gen_op_jl_subl,
        gen_op_jle_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        BUGGY_64(gen_op_jb_subq),
        gen_op_jz_subq,
        BUGGY_64(gen_op_jbe_subq),
        gen_op_js_subq,
        NULL,
        BUGGY_64(gen_op_jl_subq),
        BUGGY_64(gen_op_jle_subq),
    },
#endif
};
1046

    
1047
/* Slow SETcc generators that compute the condition from the full
   eflags state; indexed by condition code (JCC_O..JCC_LE). */
static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};
1057

    
1058
/* Fast SETcc generators usable when cc_op is a SUB variant:
   indexed by [operand size][condition code].  NULL entries (JCC_O,
   JCC_P) require the gen_setcc_slow fallback. */
static GenOpFunc *gen_setcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        NULL,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        NULL,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        NULL,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        NULL,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,
    },
#endif
};
1102

    
1103
/* FPU helpers for "op ST0, FT0", indexed by the 3-bit FP opcode field.
   Indices 2 and 3 (fcom/fcomp) share one helper; the pop half of
   fcomp is presumably emitted by the caller — TODO confirm at the
   decode site. */
static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,
};
1113

    
1114
/* FPU helpers for "op ST(i), ST0".  NOTE the exception in "r" op
   ordering: for the STN destination forms the sub/div and their
   reversed variants swap places relative to the ST0 table above.
   Indices 2 and 3 are NULL because fcom has no ST(i)-destination
   form. */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    NULL,
    NULL,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
};
1125

    
1126
/* compute eflags.C to reg: emit an indirect call through
   cc_table[cc_op].compute_c, using the dynamic cpu_cc_op value as the
   table index.  The shift amount is log2(sizeof(CCTable)) on the host
   (3 when host pointers are 32-bit, 4 when 64-bit — two function
   pointers per entry; TODO confirm against the CCTable definition). */
static void gen_compute_eflags_c(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    /* index = cc_op * sizeof(CCTable), then load the compute_c slot */
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    /* call the loaded helper; it returns the carry in cpu_tmp2_i32 */
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE, 
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64, 
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE, 
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    /* widen the 32-bit result to target width into the caller's reg */
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
1147

    
1148
/* compute all eflags into reg (callers typically pass cpu_cc_src):
   same indirect-call scheme as gen_compute_eflags_c, but through the
   compute_all slot of cc_table[cc_op]. */
static void gen_compute_eflags(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE, 
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64, 
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE, 
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
1169

    
1170
/* Emit code for a two-operand ALU op (T0 op= T1) and maintain the lazy
   condition-code state.  if d == OR_TMP0, it means memory operand
   (address in A0); otherwise d is the destination register index.
   ADC/SBB need the incoming carry, so for them cc_op becomes dynamic:
   the generated code computes cc_op = (carry << 2) + CC_OP_ADDB/SUBB
   + ot at run time (the <<2 selects the ADC/SBB cc_op family, which
   sits 4 size-variants after ADD/SUB in the enum — TODO confirm
   against the CC_OP enum layout). */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    /* load the destination operand into T0 */
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        /* flush pending cc state so the carry can be materialized */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        /* cc_op selected at run time from the carry-in (see above) */
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_SBBL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_ADDB + ot;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_CMPL:
        /* CMP writes no destination, only the flags */
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    }
}
1265

    
1266
/* Emit INC (c > 0) or DEC (c <= 0) of operand d.  if d == OR_TMP0, it
   means memory operand (address in A0).  INC/DEC leave CF untouched,
   so the pre-op carry is captured into cc_src before cc_dst is
   updated with the result. */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    /* flush pending cc state: the old carry must be computable */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    /* preserve the pre-operation carry flag in cc_src */
    gen_compute_eflags_c(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
1289

    
1290
/* XXX: add faster immediate case */
/* Emit SHL/SHR/SAR of operand op1 by the count in T1.
   is_right selects right shifts; is_arith selects SAR over SHR.
   The value shifted by (count - 1) is kept in T3 so the carry flag
   can be derived; flags are only updated when the masked count is
   non-zero (x86 leaves flags unchanged for a zero count), which makes
   cc_op dynamic after this. */
static void gen_shift_rm_T1(DisasContext *s, int ot, int op1, 
                            int is_right, int is_arith)
{
    target_ulong mask;
    int shift_label;

    /* x86 masks the shift count to 6 bits for 64-bit ops, 5 otherwise */
    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    /* tmp5 = count - 1, used to expose the last bit shifted out */
    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);

    if (is_right) {
        if (is_arith) {
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    shift_label = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), shift_label);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);

    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1350

    
1351
/* Signed-direction shift: left by arg2 when arg2 >= 0, otherwise
   right by -arg2.  Used to line up a flag bit regardless of whether
   the data bit sits above or below its eflags position. */
static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
{
    if (arg2 < 0) {
        tcg_gen_shri_tl(ret, arg1, -arg2);
    } else {
        tcg_gen_shli_tl(ret, arg1, arg2);
    }
}
1358

    
1359
/* XXX: add faster immediate case */
/* Emit ROL/ROR of operand op1 by the count in T1 (is_right selects
   ROR).  The rotate is built from two opposing shifts OR-ed together.
   Both the data update and the flag update are skipped when the
   masked count is zero; CF comes from the bit rotated into the
   carry position and OF from old^new of the relevant bit, so cc_op
   ends up dynamic. */
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1, 
                          int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label1);

    /* reduce the count modulo the operand width for sub-long sizes */
    if (ot <= OT_WORD)
        tcg_gen_andi_tl(cpu_tmp0, cpu_T[1], (1 << (3 + ot)) - 1);
    else
        tcg_gen_mov_tl(cpu_tmp0, cpu_T[1]);

    gen_extu(ot, cpu_T[0]);
    tcg_gen_mov_tl(cpu_T3, cpu_T[0]);   /* keep old value for OF */

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
    if (is_right) {
        tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    } else {
        tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    }
    tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label2);

    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    /* OF = (old ^ new) of the top bit, moved to bit 11 of eflags */
    tcg_gen_xor_tl(cpu_tmp0, cpu_T3, cpu_T[0]);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    /* CF: bit 0 after ROL, top bit after ROR */
    if (is_right) {
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], data_bits - 1);
    }
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);

    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1438

    
1439
/* Rotate-through-carry helpers, indexed by ot + (is_right * 4):
   entries 0..3 are RCL for the four operand sizes, 4..7 are RCR.
   The quad variants exist only on 64-bit targets (NULL otherwise
   via X86_64_ONLY). */
static void *helper_rotc[8] = {
    helper_rclb,
    helper_rclw,
    helper_rcll,
    X86_64_ONLY(helper_rclq),
    helper_rcrb,
    helper_rcrw,
    helper_rcrl,
    X86_64_ONLY(helper_rcrq),
};
1449

    
1450
/* XXX: add faster immediate = 1 case */
/* Emit RCL/RCR of operand op1 by the count in T1 via a runtime
   helper (rotate through carry is awkward to inline).  The flag
   update is skipped when cpu_T3 == -1 — presumably the helper stores
   the new eflags in T3, or -1 when the count was zero; TODO confirm
   against the helper_rcl*/helper_rcr* implementations. */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1, 
                           int is_right)
{
    int label1;

    /* the helper reads live cc state, so flush it first */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
                       cpu_T[0], cpu_T[0], cpu_T[1]);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(-1), label1);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label1);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1484

    
1485
/* XXX: add faster immediate case */
/* Emit SHLD/SHRD: double-precision shift of op1 (T0) with fill bits
   from T1, count in T3 (is_right selects SHRD).  For 16-bit operands
   the two halves are concatenated into 32 bits to implement the
   Intel behaviour for counts > 16.  Data and flag updates are both
   skipped when the masked count is zero, so cc_op becomes dynamic. */
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1, 
                                int is_right)
{
    int label1, label2, data_bits;
    target_ulong mask;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);

    /* tmp5 = count - 1, to capture the last bit shifted out (CF) */
    tcg_gen_addi_tl(cpu_tmp5, cpu_T3, -1);
    if (ot == OT_WORD) {
        /* Note: we implement the Intel behaviour for shift count > 16 */
        if (is_right) {
            /* build the 32-bit value T1:T0 and shift it right */
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
            tcg_gen_shli_tl(cpu_tmp0, cpu_T[1], 16);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);

            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);

            /* only needed if count > 16, but a test would complicate */
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
            tcg_gen_shl_tl(cpu_tmp0, cpu_T[0], cpu_tmp5);

            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);

            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        } else {
            /* XXX: not optimal */
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
            tcg_gen_shli_tl(cpu_T[1], cpu_T[1], 16);
            tcg_gen_or_tl(cpu_T[1], cpu_T[1], cpu_T[0]);
            tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);

            tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
            tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
            tcg_gen_shr_tl(cpu_tmp6, cpu_T[1], cpu_tmp0);
            tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);

            tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
            tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        data_bits = 8 << ot;
        if (is_right) {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);

            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);

            /* result = (T0 >> count) | (T1 << (width - count)) */
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
            tcg_gen_shl_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);

        } else {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);

            tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);

            /* result = (T0 << count) | (T1 >> (width - count)) */
            tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
            tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    }
    tcg_gen_mov_tl(cpu_T[1], cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label2);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
    if (is_right) {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    } else {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
    }
    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1595

    
1596
/* Dispatch a shift/rotate group operation: load the count into T1
   (unless the caller already put it there via OR_TMP1) and hand off
   to the matching generator.  Unknown op values emit nothing. */
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (s != OR_TMP1) {
        gen_op_mov_TN_reg(ot, 1, s);
    }
    if (op == OP_ROL) {
        gen_rot_rm_T1(s1, ot, d, 0);
    } else if (op == OP_ROR) {
        gen_rot_rm_T1(s1, ot, d, 1);
    } else if (op == OP_SHL || op == OP_SHL1) {
        gen_shift_rm_T1(s1, ot, d, 0, 0);
    } else if (op == OP_SHR) {
        gen_shift_rm_T1(s1, ot, d, 1, 0);
    } else if (op == OP_SAR) {
        gen_shift_rm_T1(s1, ot, d, 1, 1);
    } else if (op == OP_RCL) {
        gen_rotc_rm_T1(s1, ot, d, 0);
    } else if (op == OP_RCR) {
        gen_rotc_rm_T1(s1, ot, d, 1);
    }
}
1625

    
1626
/* Immediate-count shift/rotate: materialize the constant count in T1
   and reuse the variable-count path.  /* currently not optimized */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
1632

    
1633
/* Decode the modrm memory operand and emit code computing its
   effective address into A0, including any segment base.  Consumes
   the SIB byte and displacement from the instruction stream
   (advancing s->pc).  Handles 16-bit, 32-bit and (on x86-64)
   64-bit / RIP-relative addressing.  *reg_ptr / *offset_ptr are
   legacy outputs: always OR_A0 and 0. */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    /* an explicit segment prefix always forces the segment add */
    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            /* base == 4 means a SIB byte follows */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base register, disp32 follows; in 64-bit code
                   without SIB this is RIP-relative */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0) {
                    gen_op_addq_A0_im(disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* EBP/ESP-based addresses default to SS, others to DS */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        /* 16-bit addressing */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* [disp16] form */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        /* the eight 16-bit base/index register combinations */
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();      /* 16-bit wraparound */
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* BP-based forms default to SS */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
1816

    
1817
/* Skip over a modrm memory operand without generating any code:
   advances s->pc past the SIB byte and displacement exactly as
   gen_lea_modrm would consume them.  Used for multi-byte NOP-style
   instructions whose operand is decoded but ignored. */
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
    if (mod == 3)       /* register operand: nothing follows */
        return;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing */
        base = rm;

        if (base == 4) {
            /* SIB byte present */
            code = ldub_code(s->pc++);
            base = (code & 7);
        }

        switch (mod) {
        case 0:
            if (base == 5) {
                s->pc += 4;     /* disp32, no base */
            }
            break;
        case 1:
            s->pc++;            /* disp8 */
            break;
        default:
        case 2:
            s->pc += 4;         /* disp32 */
            break;
        }
    } else {
        /* 16-bit addressing */
        switch (mod) {
        case 0:
            if (rm == 6) {
                s->pc += 2;     /* disp16, no base */
            }
            break;
        case 1:
            s->pc++;            /* disp8 */
            break;
        default:
        case 2:
            s->pc += 2;         /* disp16 */
            break;
        }
    }
}
1866

    
1867
/* used for LEA and MOV AX, mem */
1868
static void gen_add_A0_ds_seg(DisasContext *s)
1869
{
1870
    int override, must_add_seg;
1871
    must_add_seg = s->addseg;
1872
    override = R_DS;
1873
    if (s->override >= 0) {
1874
        override = s->override;
1875
        must_add_seg = 1;
1876
    } else {
1877
        override = R_DS;
1878
    }
1879
    if (must_add_seg) {
1880
#ifdef TARGET_X86_64
1881
        if (CODE64(s)) {
1882
            gen_op_addq_A0_seg(override);
1883
        } else
1884
#endif
1885
        {
1886
            gen_op_addl_A0_seg(override);
1887
        }
1888
    }
1889
}
1890

    
1891
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
   OR_TMP0 */
/* is_store selects reg -> operand; otherwise operand -> reg.  For a
   register operand (mod == 3) this is a plain register move; for a
   memory operand the effective address is computed into A0 first. */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        /* register <-> register */
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    } else {
        /* memory operand: A0 <- effective address */
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_st_T0_A0(ot + s->mem_index);
        } else {
            gen_op_ld_T0_A0(ot + s->mem_index);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    }
}
1922

    
1923
/* Fetch an immediate of size 'ot' from the instruction stream and
   advance s->pc past it.  OT_QUAD falls into the default (32-bit)
   case; 64-bit immediates are handled by the caller where needed. */
static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t ret;

    switch(ot) {
    case OT_BYTE:
        ret = ldub_code(s->pc);
        s->pc++;
        break;
    case OT_WORD:
        ret = lduw_code(s->pc);
        s->pc += 2;
        break;
    default:
    case OT_LONG:
        ret = ldl_code(s->pc);
        s->pc += 4;
        break;
    }
    return ret;
}
1944

    
1945
static inline int insn_const_size(unsigned int ot)
1946
{
1947
    if (ot <= OT_LONG)
1948
        return 1 << ot;
1949
    else
1950
        return 4;
1951
}
1952

    
1953
/* Emit a jump to 'eip' at the end of a TB.  If the target lies on one
   of the pages this TB already spans, a direct (chainable) TB link is
   emitted; otherwise fall back to an indirect exit. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        /* return (tb | tb_num) so the caller can patch the chain */
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
1973

    
1974
/* Emit a conditional jump for Jcc opcode 'b': taken -> 'val',
   not taken -> 'next_eip'.  When TB chaining is allowed (s->jmp_opt)
   a specialized test is picked from gen_jcc_sub based on the pending
   cc_op; otherwise the flags are materialized and a slow in-TB
   branch is generated. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    TranslationBlock *tb;
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    /* bit 0 of the opcode inverts the condition (e.g. JZ vs JNZ) */
    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            /* for all these ops only ZF and SF can be tested directly
               on CC_DST; the '% 4' keeps only the size index */
            switch(jcc_op) {
            case JCC_Z:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        /* the specialized test reads CC_SRC/CC_DST at run time, so the
           cc_op must be committed before the branch */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            /* no fast test available: compute the flag into T0 */
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        /* inverted condition: swap the two jump targets */
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        tb = s->tb;

        l1 = gen_new_label();
        func(l1);

        /* fall-through (condition false) */
        gen_goto_tb(s, 0, next_eip);

        /* condition true */
        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {

        /* slow path: no TB chaining, set EIP explicitly and end block */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
2102

    
2103
/* Emit code leaving the SETcc condition for opcode 'b' in T0 (0/1).
   Uses a fast flag test from gen_setcc_sub when the pending cc_op
   allows it, the slow generic evaluator otherwise. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    /* bit 0 of the opcode inverts the condition */
    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        /* only ZF and SF can be read straight from CC_DST here;
           '% 4' extracts the operand-size index */
        switch(jcc_op) {
        case JCC_Z:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        /* generic path: commit cc_op, then evaluate the flag fully */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        /* invert the 0/1 result for the odd-numbered conditions */
        gen_op_xor_T0_1();
    }
}
2169

    
2170
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* protected mode: the helper performs the full descriptor load
           and may fault, so EIP and cc_op must be up to date first */
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        /* real/vm86 mode: plain selector store, no descriptor load */
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
2193

    
2194
static inline int svm_is_rep(int prefixes)
2195
{
2196
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2197
}
2198

    
2199
/* Emit an SVM intercept check for exit code 'type' with exit_info_1
   'param'.  Returns 1 only when a guaranteed #VMEXIT was generated
   (so the caller can stop translating), 0 when execution may
   continue past the check. */
static inline int
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint32_t type, uint64_t param)
{
    if(!(s->flags & (INTERCEPT_SVM_MASK)))
        /* no SVM activated */
        return 0;
    switch(type) {
        /* CRx and DRx reads/writes */
        case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
            /* helper may raise #VMEXIT, so flags and EIP must be live */
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
            }
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_svm_check_intercept_param, 
                               tcg_const_i32(type), tcg_const_i64(param));
            /* this is a special case as we do not know if the interception occurs
               so we assume there was none */
            return 0;
        case SVM_EXIT_MSR:
            /* MSR accesses are intercepted through a permission bitmap,
               checked at run time by the helper */
            if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                }
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_svm_check_intercept_param,
                                   tcg_const_i32(type), tcg_const_i64(param));
                /* this is a special case as we do not know if the interception occurs
                   so we assume there was none */
                return 0;
            }
            break;
        default:
            /* simple intercepts: if the bit is set the exit is
               unconditional, so emit a direct #VMEXIT */
            if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                }
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_vmexit,
                                   tcg_const_i32(type), tcg_const_i64(param));
                /* we can optimize this one so TBs don't get longer
                   than up to vmexit */
                gen_eob(s);
                return 1;
            }
    }
    return 0;
}
2247

    
2248
/* Convenience wrapper: intercept check with no exit_info_1 payload. */
static inline int
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    return gen_svm_check_intercept_param(s, pc_start, type, 0);
}
2253

    
2254
/* Add 'addend' to the stack pointer using the current stack address
   size (64 bit in long mode, else 32 or 16 bit from ss32). */
static inline void gen_stack_update(DisasContext *s, int addend)
{
    int aflag;                  /* 2 = 64 bit, 1 = 32 bit, 0 = 16 bit */

#ifdef TARGET_X86_64
    if (CODE64(s))
        aflag = 2;
    else
#endif
        aflag = s->ss32 ? 1 : 0;
    gen_op_add_reg_im(aflag, R_ESP, addend);
}
2267

    
2268
/* generate a push. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        /* long mode: push is 8 bytes, or 2 with a 16 bit operand size */
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                /* keep the unsegmented ESP value in T1 for the final
                   stack pointer update */
                tcg_gen_mov_tl(cpu_T[1], cpu_A0);
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            /* 16 bit stack: wrap the offset, then add the SS base */
            gen_op_andl_A0_ffff();
            tcg_gen_mov_tl(cpu_T[1], cpu_A0);
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        /* write ESP back: A0 still holds it when no SS base was added,
           otherwise the saved copy in T1 is used */
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
2307

    
2308
/* generate a push. It depends on ss32, addseg and dflag */
2309
/* slower version for T1, only used for call Ev */
2310
static void gen_push_T1(DisasContext *s)
2311
{
2312
#ifdef TARGET_X86_64
2313
    if (CODE64(s)) {
2314
        gen_op_movq_A0_reg(R_ESP);
2315
        if (s->dflag) {
2316
            gen_op_addq_A0_im(-8);
2317
            gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2318
        } else {
2319
            gen_op_addq_A0_im(-2);
2320
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
2321
        }
2322
        gen_op_mov_reg_A0(2, R_ESP);
2323
    } else
2324
#endif
2325
    {
2326
        gen_op_movl_A0_reg(R_ESP);
2327
        if (!s->dflag)
2328
            gen_op_addl_A0_im(-2);
2329
        else
2330
            gen_op_addl_A0_im(-4);
2331
        if (s->ss32) {
2332
            if (s->addseg) {
2333
                gen_op_addl_A0_seg(R_SS);
2334
            }
2335
        } else {
2336
            gen_op_andl_A0_ffff();
2337
            gen_op_addl_A0_seg(R_SS);
2338
        }
2339
        gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2340

    
2341
        if (s->ss32 && !s->addseg)
2342
            gen_op_mov_reg_A0(1, R_ESP);
2343
        else
2344
            gen_stack_update(s, (-2) << s->dflag);
2345
    }
2346
}
2347

    
2348
/* two step pop is necessary for precise exceptions */
/* Load the stack top into T0; the ESP adjustment is done separately
   by gen_pop_update() so a fault during the load leaves ESP intact. */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            /* 16 bit stack: wrap the offset, then add the SS base */
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
    }
}
2369

    
2370
/* Second half of a pop: bump the stack pointer past the value that
   gen_pop_T0() loaded (8 bytes in long mode, else 2 << dflag). */
static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
        return;
    }
#endif
    gen_stack_update(s, 2 << s->dflag);
}
2381

    
2382
/* Compute the linear address of the stack top into A0, keeping the
   raw (unsegmented) ESP value in T1 for later stack updates. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
2391

    
2392
/* NOTE: wrap around in 16 bit not fully handled */
/* PUSHA/PUSHAD: store EAX..EDI (in that order, highest address first)
   and lower ESP by 16 << dflag. */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-16 <<  s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    /* remember the final (unsegmented) ESP in T1 */
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* registers are pushed in order EAX..EDI, i.e. stored
           bottom-up as 7-i walks from EDI slot to EAX slot */
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
2410

    
2411
/* NOTE: wrap around in 16 bit not fully handled */
/* POPA/POPAD: reload EDI..EAX from the stack (the saved ESP slot is
   discarded) and raise ESP by 16 << dflag. */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    /* final ESP value = current ESP + frame size, kept in T1 */
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 <<  s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
2432

    
2433
/* ENTER: allocate a stack frame of 'esp_addend' bytes with 'level'
   nested display slots (level is taken modulo 32 per the ISA). */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        /* T1 = new frame pointer candidate */
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* copy the display slots in the helper */
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter64_level,
                               tcg_const_i32(level),
                               tcg_const_i32((ot == OT_QUAD)),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        /* RSP = new EBP - locals - display area */
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        /* T1 = new frame pointer candidate (unsegmented) */
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* copy the display slots in the helper */
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter_level,
                               tcg_const_i32(level),
                               tcg_const_i32(s->dflag),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        /* ESP = new EBP - locals - display area */
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
2488

    
2489
/* Raise CPU exception 'trapno' at guest address 'cur_eip': commit the
   flags state and EIP, call the raise helper, and end the TB. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
    s->is_jmp = 3;
}
2497

    
2498
/* an interrupt is different from an exception because of the
   privilege checks */
/* 'next_eip - cur_eip' gives the helper the instruction length it
   needs to compute the return address pushed on the stack. */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_2(helper_raise_interrupt, 
                       tcg_const_i32(intno), 
                       tcg_const_i32(next_eip - cur_eip));
    s->is_jmp = 3;
}
2511

    
2512
/* Enter the debugger at guest address 'cur_eip': commit flags and
   EIP, invoke the debug helper, and end the TB. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_0(helper_debug);
    s->is_jmp = 3;
}
2520

    
2521
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        /* the interrupt shadow (e.g. after MOV SS) ends with this
           instruction; clear it before leaving the TB */
        tcg_gen_helper_0_0(helper_reset_inhibit_irq);
    }
    if (s->singlestep_enabled) {
        /* gdbstub single-step */
        tcg_gen_helper_0_0(helper_debug);
    } else if (s->tf) {
        /* guest TF flag: raise #DB after the instruction */
        tcg_gen_helper_0_0(helper_single_step);
    } else {
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = 3;
}
2539

    
2540
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        /* chaining allowed: cc_op must be committed before leaving */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2556

    
2557
/* Unconditional jump to 'eip' using goto_tb slot 0. */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2561

    
2562
/* Load the 64 bit value at guest address A0 into the CPU state field
   at 'offset'.  'idx' is an OT_* + mem_index value; (idx >> 2) - 1
   recovers the memory index. */
static inline void gen_ldq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
}
2568

    
2569
/* Store the 64 bit CPU state field at 'offset' to guest address A0.
   See gen_ldq_env_A0() for the 'idx' encoding. */
static inline void gen_stq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
}
2575

    
2576
/* Load a 128 bit (octet) value at guest address A0 into the XMMReg
   at env offset 'offset', as two 64 bit halves. */
static inline void gen_ldo_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
}
2585

    
2586
/* Store the 128 bit XMMReg at env offset 'offset' to guest address
   A0, as two 64 bit halves. */
static inline void gen_sto_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
}
2595

    
2596
/* Copy a 128 bit value between two env offsets (two 64 bit moves). */
static inline void gen_op_movo(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
}
2603

    
2604
/* Copy a 64 bit value between two env offsets. */
static inline void gen_op_movq(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
2609

    
2610
/* Copy a 32 bit value between two env offsets. */
static inline void gen_op_movl(int d_offset, int s_offset)
{
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
}
2615

    
2616
/* Zero the 64 bit env field at 'd_offset'. */
static inline void gen_op_movq_env_0(int d_offset)
{
    tcg_gen_movi_i64(cpu_tmp1_i64, 0);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
2621

    
2622
#define SSE_SPECIAL ((void *)1)
2623
#define SSE_DUMMY ((void *)2)
2624

    
2625
#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
2626
#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
2627
                     helper_ ## x ## ss, helper_ ## x ## sd, }
2628

    
2629
/* Dispatch table for 0x0f-prefixed MMX/SSE opcodes, indexed by the
   second opcode byte and by the mandatory prefix: [0] = none,
   [1] = 0x66, [2] = 0xf3, [3] = 0xf2.  SSE_SPECIAL entries are
   handled inline in gen_sse(), SSE_DUMMY entries need no helper. */
static void *sse_op_table1[256][4] = {
    /* 3DNow! extensions */
    [0x0e] = { SSE_DUMMY }, /* femms */
    [0x0f] = { SSE_DUMMY }, /* pf... (suffix dispatched via sse_op_table5) */
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm }, /* unpcklps, unpcklpd */
    [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm }, /* unpckhps, unpckhpd */
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { helper_ucomiss, helper_ucomisd },
    [0x2f] = { helper_comiss, helper_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
    [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
    [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
    [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
    [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
               helper_cvtss2sd, helper_cvtsd2ss },
    [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq), /* cmpps etc.; variant chosen via sse_op_table4 */
    [0xc6] = { helper_shufps, helper_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, helper_punpcklqdq_xmm },
    [0x6d] = { NULL, helper_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0x70] = { helper_pshufw_mmx,
               helper_pshufd_xmm,
               helper_pshufhw_xmm,
               helper_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
    [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movq2dq, movdq2q */
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2754

    
2755
/* Immediate-form shift group (opcodes 0x71-0x73): indexed by
   8 * (opcode - 0x71) + modrm reg field; [0] = MMX, [1] = SSE. */
static void *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, helper_psrldq_xmm }, /* byte shift, SSE only */
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, helper_pslldq_xmm }, /* byte shift, SSE only */
};
2767

    
2768
/* Scalar int<->float conversion helpers, in three groups of four
   (cvtsi2*, cvtt*2si, cvt*2si); within a group the index selects
   ss/sd and 32/64 bit integer size (64 bit only on x86-64). */
static void *sse_op_table3[4 * 3] = {
    helper_cvtsi2ss,
    helper_cvtsi2sd,
    X86_64_ONLY(helper_cvtsq2ss),
    X86_64_ONLY(helper_cvtsq2sd),

    helper_cvttss2si,
    helper_cvttsd2si,
    X86_64_ONLY(helper_cvttss2sq),
    X86_64_ONLY(helper_cvttsd2sq),

    helper_cvtss2si,
    helper_cvtsd2si,
    X86_64_ONLY(helper_cvtss2sq),
    X86_64_ONLY(helper_cvtsd2sq),
};
2784

    
2785
/* cmpps/cmppd/cmpss/cmpsd predicates, indexed by the imm8 comparison
   code (0..7) and the ps/pd/ss/sd variant. */
static void *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2795

    
2796
/* 3DNow! helpers, indexed by the instruction's trailing suffix byte. */
static void *sse_op_table5[256] = {
    [0x0c] = helper_pi2fw,
    [0x0d] = helper_pi2fd,
    [0x1c] = helper_pf2iw,
    [0x1d] = helper_pf2id,
    [0x8a] = helper_pfnacc,
    [0x8e] = helper_pfpnacc,
    [0x90] = helper_pfcmpge,
    [0x94] = helper_pfmin,
    [0x96] = helper_pfrcp,
    [0x97] = helper_pfrsqrt,
    [0x9a] = helper_pfsub,
    [0x9e] = helper_pfadd,
    [0xa0] = helper_pfcmpgt,
    [0xa4] = helper_pfmax,
    [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = helper_movq, /* pfrsqit1 */
    [0xaa] = helper_pfsubr,
    [0xae] = helper_pfacc,
    [0xb0] = helper_pfcmpeq,
    [0xb4] = helper_pfmul,
    [0xb6] = helper_movq, /* pfrcpit2 */
    [0xb7] = helper_pmulhrw_mmx,
    [0xbb] = helper_pswapd,
    [0xbf] = helper_pavgb_mmx /* pavgusb */
};
2822

    
2823
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2824
{
2825
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
2826
    int modrm, mod, rm, reg, reg_addr, offset_addr;
2827
    void *sse_op2;
2828

    
2829
    b &= 0xff;
2830
    if (s->prefix & PREFIX_DATA)
2831
        b1 = 1;
2832
    else if (s->prefix & PREFIX_REPZ)
2833
        b1 = 2;
2834
    else if (s->prefix & PREFIX_REPNZ)
2835
        b1 = 3;
2836
    else
2837
        b1 = 0;
2838
    sse_op2 = sse_op_table1[b][b1];
2839
    if (!sse_op2)
2840
        goto illegal_op;
2841
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
2842
        is_xmm = 1;
2843
    } else {
2844
        if (b1 == 0) {
2845
            /* MMX case */
2846
            is_xmm = 0;
2847
        } else {
2848
            is_xmm = 1;
2849
        }
2850
    }
2851
    /* simple MMX/SSE operation */
2852
    if (s->flags & HF_TS_MASK) {
2853
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2854
        return;
2855
    }
2856
    if (s->flags & HF_EM_MASK) {
2857
    illegal_op:
2858
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2859
        return;
2860
    }
2861
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2862
        goto illegal_op;
2863
    if (b == 0x0e) {
2864
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
2865
            goto illegal_op;
2866
        /* femms */
2867
        tcg_gen_helper_0_0(helper_emms);
2868
        return;
2869
    }
2870
    if (b == 0x77) {
2871
        /* emms */
2872
        tcg_gen_helper_0_0(helper_emms);
2873
        return;
2874
    }
2875
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2876
       the static cpu state) */
2877
    if (!is_xmm) {
2878
        tcg_gen_helper_0_0(helper_enter_mmx);
2879
    }
2880

    
2881
    modrm = ldub_code(s->pc++);
2882
    reg = ((modrm >> 3) & 7);
2883
    if (is_xmm)
2884
        reg |= rex_r;
2885
    mod = (modrm >> 6) & 3;
2886
    if (sse_op2 == SSE_SPECIAL) {
2887
        b |= (b1 << 8);
2888
        switch(b) {
2889
        case 0x0e7: /* movntq */
2890
            if (mod == 3)
2891
                goto illegal_op;
2892
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2893
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2894
            break;
2895
        case 0x1e7: /* movntdq */
2896
        case 0x02b: /* movntps */
2897
        case 0x12b: /* movntps */
2898
        case 0x3f0: /* lddqu */
2899
            if (mod == 3)
2900
                goto illegal_op;
2901
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2902
            gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2903
            break;
2904
        case 0x6e: /* movd mm, ea */
2905
#ifdef TARGET_X86_64
2906
            if (s->dflag == 2) {
2907
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2908
                tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
2909
            } else
2910
#endif
2911
            {
2912
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2913
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
2914
                                 offsetof(CPUX86State,fpregs[reg].mmx));
2915
                tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
2916
            }
2917
            break;
2918
        case 0x16e: /* movd xmm, ea */
2919
#ifdef TARGET_X86_64
2920
            if (s->dflag == 2) {
2921
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2922
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
2923
                                 offsetof(CPUX86State,xmm_regs[reg]));
2924
                tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
2925
            } else
2926
#endif
2927
            {
2928
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2929
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
2930
                                 offsetof(CPUX86State,xmm_regs[reg]));
2931
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2932
                tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
2933
            }
2934
            break;
2935
        case 0x6f: /* movq mm, ea */
2936
            if (mod != 3) {
2937
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2938
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2939
            } else {
2940
                rm = (modrm & 7);
2941
                tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
2942
                               offsetof(CPUX86State,fpregs[rm].mmx));
2943
                tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
2944
                               offsetof(CPUX86State,fpregs[reg].mmx));
2945
            }
2946
            break;
2947
        case 0x010: /* movups */
2948
        case 0x110: /* movupd */
2949
        case 0x028: /* movaps */
2950
        case 0x128: /* movapd */
2951
        case 0x16f: /* movdqa xmm, ea */
2952
        case 0x26f: /* movdqu xmm, ea */
2953
            if (mod != 3) {
2954
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2955
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2956
            } else {
2957
                rm = (modrm & 7) | REX_B(s);
2958
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2959
                            offsetof(CPUX86State,xmm_regs[rm]));
2960
            }
2961
            break;
2962
        case 0x210: /* movss xmm, ea */
2963
            if (mod != 3) {
2964
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2965
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
2966
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2967
                gen_op_movl_T0_0();
2968
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2969
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2970
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2971
            } else {
2972
                rm = (modrm & 7) | REX_B(s);
2973
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2974
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2975
            }
2976
            break;
2977
        case 0x310: /* movsd xmm, ea */
2978
            if (mod != 3) {
2979
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2980
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2981
                gen_op_movl_T0_0();
2982
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2983
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2984
            } else {
2985
                rm = (modrm & 7) | REX_B(s);
2986
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2987
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2988
            }
2989
            break;
2990
        case 0x012: /* movlps */
2991
        case 0x112: /* movlpd */
2992
            if (mod != 3) {
2993
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2994
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2995
            } else {
2996
                /* movhlps */
2997
                rm = (modrm & 7) | REX_B(s);
2998
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2999
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3000
            }
3001
            break;
3002
        case 0x212: /* movsldup */
3003
            if (mod != 3) {
3004
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3005
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3006
            } else {
3007
                rm = (modrm & 7) | REX_B(s);
3008
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3009
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3010
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3011
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3012
            }
3013
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3014
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3015
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3016
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3017
            break;
3018
        case 0x312: /* movddup */
3019
            if (mod != 3) {
3020
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3021
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3022
            } else {
3023
                rm = (modrm & 7) | REX_B(s);
3024
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3025
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3026
            }
3027
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3028
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3029
            break;
3030
        case 0x016: /* movhps */
3031
        case 0x116: /* movhpd */
3032
            if (mod != 3) {
3033
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3034
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3035
            } else {
3036
                /* movlhps */
3037
                rm = (modrm & 7) | REX_B(s);
3038
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3039
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3040
            }
3041
            break;
3042
        case 0x216: /* movshdup */
3043
            if (mod != 3) {
3044
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3045
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3046
            } else {
3047
                rm = (modrm & 7) | REX_B(s);
3048
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3049
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3050
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3051
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3052
            }
3053
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3054
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3055
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3056
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3057
            break;
3058
        case 0x7e: /* movd ea, mm */
3059
#ifdef TARGET_X86_64
3060
            if (s->dflag == 2) {
3061
                tcg_gen_ld_i64(cpu_T[0], cpu_env, 
3062
                               offsetof(CPUX86State,fpregs[reg].mmx));
3063
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3064
            } else
3065
#endif
3066
            {
3067
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, 
3068
                                 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3069
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3070
            }
3071
            break;
3072
        case 0x17e: /* movd ea, xmm */
3073
#ifdef TARGET_X86_64
3074
            if (s->dflag == 2) {
3075
                tcg_gen_ld_i64(cpu_T[0], cpu_env, 
3076
                               offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3077
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3078
            } else
3079
#endif
3080
            {
3081
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, 
3082
                                 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3083
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3084
            }
3085
            break;
3086
        case 0x27e: /* movq xmm, ea */
3087
            if (mod != 3) {
3088
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3089
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3090
            } else {
3091
                rm = (modrm & 7) | REX_B(s);
3092
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3093
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3094
            }
3095
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3096
            break;
3097
        case 0x7f: /* movq ea, mm */
3098
            if (mod != 3) {
3099
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3100
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3101
            } else {
3102
                rm = (modrm & 7);
3103
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3104
                            offsetof(CPUX86State,fpregs[reg].mmx));
3105
            }
3106
            break;
3107
        case 0x011: /* movups */
3108
        case 0x111: /* movupd */
3109
        case 0x029: /* movaps */
3110
        case 0x129: /* movapd */
3111
        case 0x17f: /* movdqa ea, xmm */
3112
        case 0x27f: /* movdqu ea, xmm */
3113
            if (mod != 3) {
3114
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3115
                gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3116
            } else {
3117
                rm = (modrm & 7) | REX_B(s);
3118
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3119
                            offsetof(CPUX86State,xmm_regs[reg]));
3120
            }
3121
            break;
3122
        case 0x211: /* movss ea, xmm */
3123
            if (mod != 3) {
3124
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3125
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3126
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
3127
            } else {
3128
                rm = (modrm & 7) | REX_B(s);
3129
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3130
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3131
            }
3132
            break;
3133
        case 0x311: /* movsd ea, xmm */
3134
            if (mod != 3) {
3135
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3136
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3137
            } else {
3138
                rm = (modrm & 7) | REX_B(s);
3139
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3140
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3141
            }
3142
            break;
3143
        case 0x013: /* movlps */
3144
        case 0x113: /* movlpd */
3145
            if (mod != 3) {
3146
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3147
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3148
            } else {
3149
                goto illegal_op;
3150
            }
3151
            break;
3152
        case 0x017: /* movhps */
3153
        case 0x117: /* movhpd */
3154
            if (mod != 3) {
3155
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3156
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3157
            } else {
3158
                goto illegal_op;
3159
            }
3160
            break;
3161
        case 0x71: /* shift mm, im */
3162
        case 0x72:
3163
        case 0x73:
3164
        case 0x171: /* shift xmm, im */
3165
        case 0x172:
3166
        case 0x173:
3167
            val = ldub_code(s->pc++);
3168
            if (is_xmm) {
3169
                gen_op_movl_T0_im(val);
3170
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3171
                gen_op_movl_T0_0();
3172
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3173
                op1_offset = offsetof(CPUX86State,xmm_t0);
3174
            } else {
3175
                gen_op_movl_T0_im(val);
3176
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3177
                gen_op_movl_T0_0();
3178
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3179
                op1_offset = offsetof(CPUX86State,mmx_t0);
3180
            }
3181
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
3182
            if (!sse_op2)
3183
                goto illegal_op;
3184
            if (is_xmm) {
3185
                rm = (modrm & 7) | REX_B(s);
3186
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3187
            } else {
3188
                rm = (modrm & 7);
3189
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3190
            }
3191
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3192
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3193
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3194
            break;
3195
        case 0x050: /* movmskps */
3196
            rm = (modrm & 7) | REX_B(s);
3197
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
3198
                             offsetof(CPUX86State,xmm_regs[rm]));
3199
            tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3200
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3201
            gen_op_mov_reg_T0(OT_LONG, reg);
3202
            break;
3203
        case 0x150: /* movmskpd */
3204
            rm = (modrm & 7) | REX_B(s);
3205
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
3206
                             offsetof(CPUX86State,xmm_regs[rm]));
3207
            tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3208
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3209
            gen_op_mov_reg_T0(OT_LONG, reg);
3210
            break;
3211
        case 0x02a: /* cvtpi2ps */
3212
        case 0x12a: /* cvtpi2pd */
3213
            tcg_gen_helper_0_0(helper_enter_mmx);
3214
            if (mod != 3) {
3215
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3216
                op2_offset = offsetof(CPUX86State,mmx_t0);
3217
                gen_ldq_env_A0(s->mem_index, op2_offset);
3218
            } else {
3219
                rm = (modrm & 7);
3220
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3221
            }
3222
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3223
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3224
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3225
            switch(b >> 8) {
3226
            case 0x0:
3227
                tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3228
                break;
3229
            default:
3230
            case 0x1:
3231
                tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3232
                break;
3233
            }
3234
            break;
3235
        case 0x22a: /* cvtsi2ss */
3236
        case 0x32a: /* cvtsi2sd */
3237
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3238
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3239
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3240
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3241
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3242
            tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3243
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
3244
            break;
3245
        case 0x02c: /* cvttps2pi */
3246
        case 0x12c: /* cvttpd2pi */
3247
        case 0x02d: /* cvtps2pi */
3248
        case 0x12d: /* cvtpd2pi */
3249
            tcg_gen_helper_0_0(helper_enter_mmx);
3250
            if (mod != 3) {
3251
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3252
                op2_offset = offsetof(CPUX86State,xmm_t0);
3253
                gen_ldo_env_A0(s->mem_index, op2_offset);
3254
            } else {
3255
                rm = (modrm & 7) | REX_B(s);
3256
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3257
            }
3258
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3259
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3260
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3261
            switch(b) {
3262
            case 0x02c:
3263
                tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3264
                break;
3265
            case 0x12c:
3266
                tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3267
                break;
3268
            case 0x02d:
3269
                tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3270
                break;
3271
            case 0x12d:
3272
                tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3273
                break;
3274
            }
3275
            break;
3276
        case 0x22c: /* cvttss2si */
3277
        case 0x32c: /* cvttsd2si */
3278
        case 0x22d: /* cvtss2si */
3279
        case 0x32d: /* cvtsd2si */
3280
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3281
            if (mod != 3) {
3282
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3283
                if ((b >> 8) & 1) {
3284
                    gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3285
                } else {
3286
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3287
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3288
                }
3289
                op2_offset = offsetof(CPUX86State,xmm_t0);
3290
            } else {
3291
                rm = (modrm & 7) | REX_B(s);
3292
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3293
            }
3294
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3295
                                    (b & 1) * 4];
3296
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3297
            if (ot == OT_LONG) {
3298
                tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
3299
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3300
            } else {
3301
                tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
3302
            }
3303
            gen_op_mov_reg_T0(ot, reg);
3304
            break;
3305
        case 0xc4: /* pinsrw */
3306
        case 0x1c4:
3307
            s->rip_offset = 1;
3308
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3309
            val = ldub_code(s->pc++);
3310
            if (b1) {
3311
                val &= 7;
3312
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
3313
                                offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3314
            } else {
3315
                val &= 3;
3316
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
3317
                                offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3318
            }
3319
            break;
3320
        case 0xc5: /* pextrw */
3321
        case 0x1c5:
3322
            if (mod != 3)
3323
                goto illegal_op;
3324
            val = ldub_code(s->pc++);
3325
            if (b1) {
3326
                val &= 7;
3327
                rm = (modrm & 7) | REX_B(s);
3328
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3329
                                 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3330
            } else {
3331
                val &= 3;
3332
                rm = (modrm & 7);
3333
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3334
                                offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3335
            }
3336
            reg = ((modrm >> 3) & 7) | rex_r;
3337
            gen_op_mov_reg_T0(OT_LONG, reg);
3338
            break;
3339
        case 0x1d6: /* movq ea, xmm */
3340
            if (mod != 3) {
3341
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3342
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3343
            } else {
3344
                rm = (modrm & 7) | REX_B(s);
3345
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3346
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3347
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3348
            }
3349
            break;
3350
        case 0x2d6: /* movq2dq */
3351
            tcg_gen_helper_0_0(helper_enter_mmx);
3352
            rm = (modrm & 7);
3353
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3354
                        offsetof(CPUX86State,fpregs[rm].mmx));
3355
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3356
            break;
3357
        case 0x3d6: /* movdq2q */
3358
            tcg_gen_helper_0_0(helper_enter_mmx);
3359
            rm = (modrm & 7) | REX_B(s);
3360
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3361
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3362
            break;
3363
        case 0xd7: /* pmovmskb */
3364
        case 0x1d7:
3365
            if (mod != 3)
3366
                goto illegal_op;
3367
            if (b1) {
3368
                rm = (modrm & 7) | REX_B(s);
3369
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
3370
                tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
3371
            } else {
3372
                rm = (modrm & 7);
3373
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
3374
                tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
3375
            }
3376
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3377
            reg = ((modrm >> 3) & 7) | rex_r;
3378
            gen_op_mov_reg_T0(OT_LONG, reg);
3379
            break;
3380
        default:
3381
            goto illegal_op;
3382
        }
3383
    } else {
3384
        /* generic MMX or SSE operation */
3385
        switch(b) {
3386
        case 0x70: /* pshufx insn */
3387
        case 0xc6: /* pshufx insn */
3388
        case 0xc2: /* compare insns */
3389
            s->rip_offset = 1;
3390
            break;
3391
        default:
3392
            break;
3393
        }
3394
        if (is_xmm) {
3395
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3396
            if (mod != 3) {
3397
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3398
                op2_offset = offsetof(CPUX86State,xmm_t0);
3399
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3400
                                b == 0xc2)) {
3401
                    /* specific case for SSE single instructions */
3402
                    if (b1 == 2) {
3403
                        /* 32 bit access */
3404
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3405
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3406
                    } else {
3407
                        /* 64 bit access */
3408
                        gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3409
                    }
3410
                } else {
3411
                    gen_ldo_env_A0(s->mem_index, op2_offset);
3412
                }
3413
            } else {
3414
                rm = (modrm & 7) | REX_B(s);
3415
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3416
            }
3417
        } else {
3418
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3419
            if (mod != 3) {
3420
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3421
                op2_offset = offsetof(CPUX86State,mmx_t0);
3422
                gen_ldq_env_A0(s->mem_index, op2_offset);
3423
            } else {
3424
                rm = (modrm & 7);
3425
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3426
            }
3427
        }
3428
        switch(b) {
3429
        case 0x0f: /* 3DNow! data insns */
3430
            if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3431
                goto illegal_op;
3432
            val = ldub_code(s->pc++);
3433
            sse_op2 = sse_op_table5[val];
3434
            if (!sse_op2)
3435
                goto illegal_op;
3436
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3437
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3438
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3439
            break;
3440
        case 0x70: /* pshufx insn */
3441
        case 0xc6: /* pshufx insn */
3442
            val = ldub_code(s->pc++);
3443
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3444
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3445
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3446
            break;
3447
        case 0xc2:
3448
            /* compare insns */
3449
            val = ldub_code(s->pc++);
3450
            if (val >= 8)
3451
                goto illegal_op;
3452
            sse_op2 = sse_op_table4[val][b1];
3453
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3454
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3455
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3456
            break;
3457
        case 0xf7:
3458
            /* maskmov : we must prepare A0 */
3459
            if (mod != 3)
3460
                goto illegal_op;
3461
#ifdef TARGET_X86_64
3462
            if (s->aflag == 2) {
3463
                gen_op_movq_A0_reg(R_EDI);
3464
            } else
3465
#endif
3466
            {
3467
                gen_op_movl_A0_reg(R_EDI);
3468
                if (s->aflag == 0)
3469
                    gen_op_andl_A0_ffff();
3470
            }
3471
            gen_add_A0_ds_seg(s);
3472

    
3473
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3474
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3475
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
3476
            break;
3477
        default:
3478
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3479
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3480
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3481
            break;
3482
        }
3483
        if (b == 0x2e || b == 0x2f) {
3484
            s->cc_op = CC_OP_EFLAGS;
3485
        }
3486
    }
3487
}
3488

    
3489
/* convert one instruction. s->is_jmp is set if the translation must
3490
   be stopped. Return the next pc value */
3491
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3492
{
3493
    int b, prefixes, aflag, dflag;
3494
    int shift, ot;
3495
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3496
    target_ulong next_eip, tval;
3497
    int rex_w, rex_r;
3498

    
3499
    s->pc = pc_start;
3500
    prefixes = 0;
3501
    aflag = s->code32;
3502
    dflag = s->code32;
3503
    s->override = -1;
3504
    rex_w = -1;
3505
    rex_r = 0;
3506
#ifdef TARGET_X86_64
3507
    s->rex_x = 0;
3508
    s->rex_b = 0;
3509
    x86_64_hregs = 0;
3510
#endif
3511
    s->rip_offset = 0; /* for relative ip address */
3512
 next_byte:
3513
    b = ldub_code(s->pc);
3514
    s->pc++;
3515
    /* check prefixes */
3516
#ifdef TARGET_X86_64
3517
    if (CODE64(s)) {
3518
        switch (b) {
3519
        case 0xf3:
3520
            prefixes |= PREFIX_REPZ;
3521
            goto next_byte;
3522
        case 0xf2:
3523
            prefixes |= PREFIX_REPNZ;
3524
            goto next_byte;
3525
        case 0xf0:
3526
            prefixes |= PREFIX_LOCK;
3527
            goto next_byte;
3528
        case 0x2e:
3529
            s->override = R_CS;
3530
            goto next_byte;
3531
        case 0x36:
3532
            s->override = R_SS;
3533
            goto next_byte;
3534
        case 0x3e:
3535
            s->override = R_DS;
3536
            goto next_byte;
3537
        case 0x26:
3538
            s->override = R_ES;
3539
            goto next_byte;
3540
        case 0x64:
3541
            s->override = R_FS;
3542
            goto next_byte;
3543
        case 0x65:
3544
            s->override = R_GS;
3545
            goto next_byte;
3546
        case 0x66:
3547
            prefixes |= PREFIX_DATA;
3548
            goto next_byte;
3549
        case 0x67:
3550
            prefixes |= PREFIX_ADR;
3551
            goto next_byte;
3552
        case 0x40 ... 0x4f:
3553
            /* REX prefix */
3554
            rex_w = (b >> 3) & 1;
3555
            rex_r = (b & 0x4) << 1;
3556
            s->rex_x = (b & 0x2) << 2;
3557
            REX_B(s) = (b & 0x1) << 3;
3558
            x86_64_hregs = 1; /* select uniform byte register addressing */
3559
            goto next_byte;
3560
        }
3561
        if (rex_w == 1) {
3562
            /* 0x66 is ignored if rex.w is set */
3563
            dflag = 2;
3564
        } else {
3565
            if (prefixes & PREFIX_DATA)
3566
                dflag ^= 1;
3567
        }
3568
        if (!(prefixes & PREFIX_ADR))
3569
            aflag = 2;
3570
    } else
3571
#endif
3572
    {
3573
        switch (b) {
3574
        case 0xf3:
3575
            prefixes |= PREFIX_REPZ;
3576
            goto next_byte;
3577
        case 0xf2:
3578
            prefixes |= PREFIX_REPNZ;
3579
            goto next_byte;
3580
        case 0xf0:
3581
            prefixes |= PREFIX_LOCK;
3582
            goto next_byte;
3583
        case 0x2e:
3584
            s->override = R_CS;
3585
            goto next_byte;
3586
        case 0x36:
3587
            s->override = R_SS;
3588
            goto next_byte;
3589
        case 0x3e:
3590
            s->override = R_DS;
3591
            goto next_byte;
3592
        case 0x26:
3593
            s->override = R_ES;
3594
            goto next_byte;
3595
        case 0x64:
3596
            s->override = R_FS;
3597
            goto next_byte;
3598
        case 0x65:
3599
            s->override = R_GS;
3600
            goto next_byte;
3601
        case 0x66:
3602
            prefixes |= PREFIX_DATA;
3603
            goto next_byte;
3604
        case 0x67:
3605
            prefixes |= PREFIX_ADR;
3606
            goto next_byte;
3607
        }
3608
        if (prefixes & PREFIX_DATA)
3609
            dflag ^= 1;
3610
        if (prefixes & PREFIX_ADR)
3611
            aflag ^= 1;
3612
    }
3613

    
3614
    s->prefix = prefixes;
3615
    s->aflag = aflag;
3616
    s->dflag = dflag;
3617

    
3618
    /* lock generation */
3619
    if (prefixes & PREFIX_LOCK)
3620
        tcg_gen_helper_0_0(helper_lock);
3621

    
3622
    /* now check op code */
3623
 reswitch:
3624
    switch(b) {
3625
    case 0x0f:
3626
        /**************************/
3627
        /* extended op code */
3628
        b = ldub_code(s->pc++) | 0x100;
3629
        goto reswitch;
3630

    
3631
        /**************************/
3632
        /* arith & logic */
3633
    case 0x00 ... 0x05:
3634
    case 0x08 ... 0x0d:
3635
    case 0x10 ... 0x15:
3636
    case 0x18 ... 0x1d:
3637
    case 0x20 ... 0x25:
3638
    case 0x28 ... 0x2d:
3639
    case 0x30 ... 0x35:
3640
    case 0x38 ... 0x3d:
3641
        {
3642
            int op, f, val;
3643
            op = (b >> 3) & 7;
3644
            f = (b >> 1) & 3;
3645

    
3646
            if ((b & 1) == 0)
3647
                ot = OT_BYTE;
3648
            else
3649
                ot = dflag + OT_WORD;
3650

    
3651
            switch(f) {
3652
            case 0: /* OP Ev, Gv */
3653
                modrm = ldub_code(s->pc++);
3654
                reg = ((modrm >> 3) & 7) | rex_r;
3655
                mod = (modrm >> 6) & 3;
3656
                rm = (modrm & 7) | REX_B(s);
3657
                if (mod != 3) {
3658
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3659
                    opreg = OR_TMP0;
3660
                } else if (op == OP_XORL && rm == reg) {
3661
                xor_zero:
3662
                    /* xor reg, reg optimisation */
3663
                    gen_op_movl_T0_0();
3664
                    s->cc_op = CC_OP_LOGICB + ot;
3665
                    gen_op_mov_reg_T0(ot, reg);
3666
                    gen_op_update1_cc();
3667
                    break;
3668
                } else {
3669
                    opreg = rm;
3670
                }
3671
                gen_op_mov_TN_reg(ot, 1, reg);
3672
                gen_op(s, op, ot, opreg);
3673
                break;
3674
            case 1: /* OP Gv, Ev */
3675
                modrm = ldub_code(s->pc++);
3676
                mod = (modrm >> 6) & 3;
3677
                reg = ((modrm >> 3) & 7) | rex_r;
3678
                rm = (modrm & 7) | REX_B(s);
3679
                if (mod != 3) {
3680
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3681
                    gen_op_ld_T1_A0(ot + s->mem_index);
3682
                } else if (op == OP_XORL && rm == reg) {
3683
                    goto xor_zero;
3684
                } else {
3685
                    gen_op_mov_TN_reg(ot, 1, rm);
3686
                }
3687
                gen_op(s, op, ot, reg);
3688
                break;
3689
            case 2: /* OP A, Iv */
3690
                val = insn_get(s, ot);
3691
                gen_op_movl_T1_im(val);
3692
                gen_op(s, op, ot, OR_EAX);
3693
                break;
3694
            }
3695
        }
3696
        break;
3697

    
3698
    case 0x80: /* GRP1 */
3699
    case 0x81:
3700
    case 0x82:
3701
    case 0x83:
3702
        {
3703
            int val;
3704

    
3705
            if ((b & 1) == 0)
3706
                ot = OT_BYTE;
3707
            else
3708
                ot = dflag + OT_WORD;
3709

    
3710
            modrm = ldub_code(s->pc++);
3711
            mod = (modrm >> 6) & 3;
3712
            rm = (modrm & 7) | REX_B(s);
3713
            op = (modrm >> 3) & 7;
3714

    
3715
            if (mod != 3) {
3716
                if (b == 0x83)
3717
                    s->rip_offset = 1;
3718
                else
3719
                    s->rip_offset = insn_const_size(ot);
3720
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3721
                opreg = OR_TMP0;
3722
            } else {
3723
                opreg = rm;
3724
            }
3725

    
3726
            switch(b) {
3727
            default:
3728
            case 0x80:
3729
            case 0x81:
3730
            case 0x82:
3731
                val = insn_get(s, ot);
3732
                break;
3733
            case 0x83:
3734
                val = (int8_t)insn_get(s, OT_BYTE);
3735
                break;
3736
            }
3737
            gen_op_movl_T1_im(val);
3738
            gen_op(s, op, ot, opreg);
3739
        }
3740
        break;
3741

    
3742
        /**************************/
3743
        /* inc, dec, and other misc arith */
3744
    case 0x40 ... 0x47: /* inc Gv */
3745
        ot = dflag ? OT_LONG : OT_WORD;
3746
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
3747
        break;
3748
    case 0x48 ... 0x4f: /* dec Gv */
3749
        ot = dflag ? OT_LONG : OT_WORD;
3750
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
3751
        break;
3752
    case 0xf6: /* GRP3 */
3753
    case 0xf7:
3754
        if ((b & 1) == 0)
3755
            ot = OT_BYTE;
3756
        else
3757
            ot = dflag + OT_WORD;
3758

    
3759
        modrm = ldub_code(s->pc++);
3760
        mod = (modrm >> 6) & 3;
3761
        rm = (modrm & 7) | REX_B(s);
3762
        op = (modrm >> 3) & 7;
3763
        if (mod != 3) {
3764
            if (op == 0)
3765
                s->rip_offset = insn_const_size(ot);
3766
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3767
            gen_op_ld_T0_A0(ot + s->mem_index);
3768
        } else {
3769
            gen_op_mov_TN_reg(ot, 0, rm);
3770
        }
3771

    
3772
        switch(op) {
3773
        case 0: /* test */
3774
            val = insn_get(s, ot);
3775
            gen_op_movl_T1_im(val);
3776
            gen_op_testl_T0_T1_cc();
3777
            s->cc_op = CC_OP_LOGICB + ot;
3778
            break;
3779
        case 2: /* not */
3780
            tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
3781
            if (mod != 3) {
3782
                gen_op_st_T0_A0(ot + s->mem_index);
3783
            } else {
3784
                gen_op_mov_reg_T0(ot, rm);
3785
            }
3786
            break;
3787
        case 3: /* neg */
3788
            tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
3789
            if (mod != 3) {
3790
                gen_op_st_T0_A0(ot + s->mem_index);
3791
            } else {
3792
                gen_op_mov_reg_T0(ot, rm);
3793
            }
3794
            gen_op_update_neg_cc();
3795
            s->cc_op = CC_OP_SUBB + ot;
3796
            break;
3797
        case 4: /* mul */
3798
            switch(ot) {
3799
            case OT_BYTE:
3800
                gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
3801
                tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
3802
                tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
3803
                /* XXX: use 32 bit mul which could be faster */
3804
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3805
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
3806
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3807
                tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
3808
                s->cc_op = CC_OP_MULB;
3809
                break;
3810
            case OT_WORD:
3811
                gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
3812
                tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
3813
                tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
3814
                /* XXX: use 32 bit mul which could be faster */
3815
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3816
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
3817
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3818
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
3819
                gen_op_mov_reg_T0(OT_WORD, R_EDX);
3820
                tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3821
                s->cc_op = CC_OP_MULW;
3822
                break;
3823
            default:
3824
            case OT_LONG:
3825
#ifdef TARGET_X86_64
3826
                gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3827
                tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
3828
                tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
3829
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3830
                gen_op_mov_reg_T0(OT_LONG, R_EAX);
3831
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3832
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
3833
                gen_op_mov_reg_T0(OT_LONG, R_EDX);
3834
                tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3835
#else
3836
                {
3837
                    TCGv t0, t1;
3838
                    t0 = tcg_temp_new(TCG_TYPE_I64);
3839
                    t1 = tcg_temp_new(TCG_TYPE_I64);
3840
                    gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3841
                    tcg_gen_extu_i32_i64(t0, cpu_T[0]);
3842
                    tcg_gen_extu_i32_i64(t1, cpu_T[1]);
3843
                    tcg_gen_mul_i64(t0, t0, t1);
3844
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3845
                    gen_op_mov_reg_T0(OT_LONG, R_EAX);
3846
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3847
                    tcg_gen_shri_i64(t0, t0, 32);
3848
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3849
                    gen_op_mov_reg_T0(OT_LONG, R_EDX);
3850
                    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3851
                }
3852
#endif
3853
                s->cc_op = CC_OP_MULL;
3854
                break;
3855
#ifdef TARGET_X86_64
3856
            case OT_QUAD:
3857
                tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
3858
                s->cc_op = CC_OP_MULQ;
3859
                break;
3860
#endif
3861
            }
3862
            break;
3863
        case 5: /* imul */
3864
            switch(ot) {
3865
            case OT_BYTE:
3866
                gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
3867
                tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
3868
                tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
3869
                /* XXX: use 32 bit mul which could be faster */
3870
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3871
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
3872
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3873
                tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
3874
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3875
                s->cc_op = CC_OP_MULB;
3876
                break;
3877
            case OT_WORD:
3878
                gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
3879
                tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
3880
                tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
3881
                /* XXX: use 32 bit mul which could be faster */
3882
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3883
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
3884
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3885
                tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
3886
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3887
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
3888
                gen_op_mov_reg_T0(OT_WORD, R_EDX);
3889
                s->cc_op = CC_OP_MULW;
3890
                break;
3891
            default:
3892
            case OT_LONG:
3893
#ifdef TARGET_X86_64
3894
                gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3895
                tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
3896
                tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
3897
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3898
                gen_op_mov_reg_T0(OT_LONG, R_EAX);
3899
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3900
                tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
3901
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3902
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
3903
                gen_op_mov_reg_T0(OT_LONG, R_EDX);
3904
#else
3905
                {
3906
                    TCGv t0, t1;
3907
                    t0 = tcg_temp_new(TCG_TYPE_I64);
3908
                    t1 = tcg_temp_new(TCG_TYPE_I64);
3909
                    gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3910
                    tcg_gen_ext_i32_i64(t0, cpu_T[0]);
3911
                    tcg_gen_ext_i32_i64(t1, cpu_T[1]);
3912
                    tcg_gen_mul_i64(t0, t0, t1);
3913
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3914
                    gen_op_mov_reg_T0(OT_LONG, R_EAX);
3915
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3916
                    tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
3917
                    tcg_gen_shri_i64(t0, t0, 32);
3918
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3919
                    gen_op_mov_reg_T0(OT_LONG, R_EDX);
3920
                    tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3921
                }
3922
#endif
3923
                s->cc_op = CC_OP_MULL;
3924
                break;
3925
#ifdef TARGET_X86_64
3926
            case OT_QUAD:
3927
                tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
3928
                s->cc_op = CC_OP_MULQ;
3929
                break;
3930
#endif
3931
            }
3932
            break;
3933
        case 6: /* div */
3934
            switch(ot) {
3935
            case OT_BYTE:
3936
                gen_jmp_im(pc_start - s->cs_base);
3937
                tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
3938
                break;
3939
            case OT_WORD:
3940
                gen_jmp_im(pc_start - s->cs_base);
3941
                tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
3942
                break;
3943
            default:
3944
            case OT_LONG:
3945
                gen_jmp_im(pc_start - s->cs_base);
3946
                tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
3947
                break;
3948
#ifdef TARGET_X86_64
3949
            case OT_QUAD:
3950
                gen_jmp_im(pc_start - s->cs_base);
3951
                tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
3952
                break;
3953
#endif
3954
            }
3955
            break;
3956
        case 7: /* idiv */
3957
            switch(ot) {
3958
            case OT_BYTE:
3959
                gen_jmp_im(pc_start - s->cs_base);
3960
                tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
3961
                break;
3962
            case OT_WORD:
3963
                gen_jmp_im(pc_start - s->cs_base);
3964
                tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
3965
                break;
3966
            default:
3967
            case OT_LONG:
3968
                gen_jmp_im(pc_start - s->cs_base);
3969
                tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
3970
                break;
3971
#ifdef TARGET_X86_64
3972
            case OT_QUAD:
3973
                gen_jmp_im(pc_start - s->cs_base);
3974
                tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
3975
                break;
3976
#endif
3977
            }
3978
            break;
3979
        default:
3980
            goto illegal_op;
3981
        }
3982
        break;
3983

    
3984
    case 0xfe: /* GRP4 */
3985
    case 0xff: /* GRP5 */
3986
        if ((b & 1) == 0)
3987
            ot = OT_BYTE;
3988
        else
3989
            ot = dflag + OT_WORD;
3990

    
3991
        modrm = ldub_code(s->pc++);
3992
        mod = (modrm >> 6) & 3;
3993
        rm = (modrm & 7) | REX_B(s);
3994
        op = (modrm >> 3) & 7;
3995
        if (op >= 2 && b == 0xfe) {
3996
            goto illegal_op;
3997
        }
3998
        if (CODE64(s)) {
3999
            if (op == 2 || op == 4) {
4000
                /* operand size for jumps is 64 bit */
4001
                ot = OT_QUAD;
4002
            } else if (op == 3 || op == 5) {
4003
                /* for call calls, the operand is 16 or 32 bit, even
4004
                   in long mode */
4005
                ot = dflag ? OT_LONG : OT_WORD;
4006
            } else if (op == 6) {
4007
                /* default push size is 64 bit */
4008
                ot = dflag ? OT_QUAD : OT_WORD;
4009
            }
4010
        }
4011
        if (mod != 3) {
4012
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4013
            if (op >= 2 && op != 3 && op != 5)
4014
                gen_op_ld_T0_A0(ot + s->mem_index);
4015
        } else {
4016
            gen_op_mov_TN_reg(ot, 0, rm);
4017
        }
4018

    
4019
        switch(op) {
4020
        case 0: /* inc Ev */
4021
            if (mod != 3)
4022
                opreg = OR_TMP0;
4023
            else
4024
                opreg = rm;
4025
            gen_inc(s, ot, opreg, 1);
4026
            break;
4027
        case 1: /* dec Ev */
4028
            if (mod != 3)
4029
                opreg = OR_TMP0;
4030
            else
4031
                opreg = rm;
4032
            gen_inc(s, ot, opreg, -1);
4033
            break;
4034
        case 2: /* call Ev */
4035
            /* XXX: optimize if memory (no 'and' is necessary) */
4036
            if (s->dflag == 0)
4037
                gen_op_andl_T0_ffff();
4038
            next_eip = s->pc - s->cs_base;
4039
            gen_movtl_T1_im(next_eip);
4040
            gen_push_T1(s);
4041
            gen_op_jmp_T0();
4042
            gen_eob(s);
4043
            break;
4044
        case 3: /* lcall Ev */
4045
            gen_op_ld_T1_A0(ot + s->mem_index);
4046
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4047
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4048
        do_lcall:
4049
            if (s->pe && !s->vm86) {
4050
                if (s->cc_op != CC_OP_DYNAMIC)
4051
                    gen_op_set_cc_op(s->cc_op);
4052
                gen_jmp_im(pc_start - s->cs_base);
4053
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4054
                tcg_gen_helper_0_4(helper_lcall_protected,
4055
                                   cpu_tmp2_i32, cpu_T[1],
4056
                                   tcg_const_i32(dflag), 
4057
                                   tcg_const_i32(s->pc - pc_start));
4058
            } else {
4059
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4060
                tcg_gen_helper_0_4(helper_lcall_real,
4061
                                   cpu_tmp2_i32, cpu_T[1],
4062
                                   tcg_const_i32(dflag), 
4063
                                   tcg_const_i32(s->pc - s->cs_base));
4064
            }
4065
            gen_eob(s);
4066
            break;
4067
        case 4: /* jmp Ev */
4068
            if (s->dflag == 0)
4069
                gen_op_andl_T0_ffff();
4070
            gen_op_jmp_T0();
4071
            gen_eob(s);
4072
            break;
4073
        case 5: /* ljmp Ev */
4074
            gen_op_ld_T1_A0(ot + s->mem_index);
4075
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4076
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4077
        do_ljmp:
4078
            if (s->pe && !s->vm86) {
4079
                if (s->cc_op != CC_OP_DYNAMIC)
4080
                    gen_op_set_cc_op(s->cc_op);
4081
                gen_jmp_im(pc_start - s->cs_base);
4082
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4083
                tcg_gen_helper_0_3(helper_ljmp_protected,
4084
                                   cpu_tmp2_i32,
4085
                                   cpu_T[1],
4086
                                   tcg_const_i32(s->pc - pc_start));
4087
            } else {
4088
                gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
4089
                gen_op_movl_T0_T1();
4090
                gen_op_jmp_T0();
4091
            }
4092
            gen_eob(s);
4093
            break;
4094
        case 6: /* push Ev */
4095
            gen_push_T0(s);
4096
            break;
4097
        default:
4098
            goto illegal_op;
4099
        }
4100
        break;
4101

    
4102
    case 0x84: /* test Ev, Gv */
4103
    case 0x85:
4104
        if ((b & 1) == 0)
4105
            ot = OT_BYTE;
4106
        else
4107
            ot = dflag + OT_WORD;
4108

    
4109
        modrm = ldub_code(s->pc++);
4110
        mod = (modrm >> 6) & 3;
4111
        rm = (modrm & 7) | REX_B(s);
4112
        reg = ((modrm >> 3) & 7) | rex_r;
4113

    
4114
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4115
        gen_op_mov_TN_reg(ot, 1, reg);
4116
        gen_op_testl_T0_T1_cc();
4117
        s->cc_op = CC_OP_LOGICB + ot;
4118
        break;
4119

    
4120
    case 0xa8: /* test eAX, Iv */
4121
    case 0xa9:
4122
        if ((b & 1) == 0)
4123
            ot = OT_BYTE;
4124
        else
4125
            ot = dflag + OT_WORD;
4126
        val = insn_get(s, ot);
4127

    
4128
        gen_op_mov_TN_reg(ot, 0, OR_EAX);
4129
        gen_op_movl_T1_im(val);
4130
        gen_op_testl_T0_T1_cc();
4131
        s->cc_op = CC_OP_LOGICB + ot;
4132
        break;
4133

    
4134
    case 0x98: /* CWDE/CBW */
4135
#ifdef TARGET_X86_64
4136
        if (dflag == 2) {
4137
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4138
            tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4139
            gen_op_mov_reg_T0(OT_QUAD, R_EAX);
4140
        } else
4141
#endif
4142
        if (dflag == 1) {
4143
            gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4144
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4145
            gen_op_mov_reg_T0(OT_LONG, R_EAX);
4146
        } else {
4147
            gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
4148
            tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4149
            gen_op_mov_reg_T0(OT_WORD, R_EAX);
4150
        }
4151
        break;
4152
    case 0x99: /* CDQ/CWD */
4153
#ifdef TARGET_X86_64
4154
        if (dflag == 2) {
4155
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4156
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
4157
            gen_op_mov_reg_T0(OT_QUAD, R_EDX);
4158
        } else
4159
#endif
4160
        if (dflag == 1) {
4161
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4162
            tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4163
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
4164
            gen_op_mov_reg_T0(OT_LONG, R_EDX);
4165
        } else {
4166
            gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4167
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4168
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
4169
            gen_op_mov_reg_T0(OT_WORD, R_EDX);
4170
        }
4171
        break;
4172
    case 0x1af: /* imul Gv, Ev */
4173
    case 0x69: /* imul Gv, Ev, I */
4174
    case 0x6b:
4175
        ot = dflag + OT_WORD;
4176
        modrm = ldub_code(s->pc++);
4177
        reg = ((modrm >> 3) & 7) | rex_r;
4178
        if (b == 0x69)
4179
            s->rip_offset = insn_const_size(ot);
4180
        else if (b == 0x6b)
4181
            s->rip_offset = 1;
4182
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4183
        if (b == 0x69) {
4184
            val = insn_get(s, ot);
4185
            gen_op_movl_T1_im(val);
4186
        } else if (b == 0x6b) {
4187
            val = (int8_t)insn_get(s, OT_BYTE);
4188
            gen_op_movl_T1_im(val);
4189
        } else {
4190
            gen_op_mov_TN_reg(ot, 1, reg);
4191
        }
4192

    
4193
#ifdef TARGET_X86_64
4194
        if (ot == OT_QUAD) {
4195
            tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
4196
        } else
4197
#endif
4198
        if (ot == OT_LONG) {
4199
#ifdef TARGET_X86_64
4200
                tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4201
                tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4202
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4203
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4204
                tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4205
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4206
#else
4207
                {
4208
                    TCGv t0, t1;
4209
                    t0 = tcg_temp_new(TCG_TYPE_I64);
4210
                    t1 = tcg_temp_new(TCG_TYPE_I64);
4211
                    tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4212
                    tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4213
                    tcg_gen_mul_i64(t0, t0, t1);
4214
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4215
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4216
                    tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4217
                    tcg_gen_shri_i64(t0, t0, 32);
4218
                    tcg_gen_trunc_i64_i32(cpu_T[1], t0);
4219
                    tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
4220
                }
4221
#endif
4222
        } else {
4223
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4224
            tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4225
            /* XXX: use 32 bit mul which could be faster */
4226
            tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4227
            tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4228
            tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4229
            tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4230
        }
4231
        gen_op_mov_reg_T0(ot, reg);
4232
        s->cc_op = CC_OP_MULB + ot;
4233
        break;
4234
    case 0x1c0:
4235
    case 0x1c1: /* xadd Ev, Gv */
4236
        if ((b & 1) == 0)
4237
            ot = OT_BYTE;
4238
        else
4239
            ot = dflag + OT_WORD;
4240
        modrm = ldub_code(s->pc++);
4241
        reg = ((modrm >> 3) & 7) | rex_r;
4242
        mod = (modrm >> 6) & 3;
4243
        if (mod == 3) {
4244
            rm = (modrm & 7) | REX_B(s);
4245
            gen_op_mov_TN_reg(ot, 0, reg);
4246
            gen_op_mov_TN_reg(ot, 1, rm);
4247
            gen_op_addl_T0_T1();
4248
            gen_op_mov_reg_T1(ot, reg);
4249
            gen_op_mov_reg_T0(ot, rm);
4250
        } else {
4251
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4252
            gen_op_mov_TN_reg(ot, 0, reg);
4253
            gen_op_ld_T1_A0(ot + s->mem_index);
4254
            gen_op_addl_T0_T1();
4255
            gen_op_st_T0_A0(ot + s->mem_index);
4256
            gen_op_mov_reg_T1(ot, reg);
4257
        }
4258
        gen_op_update2_cc();
4259
        s->cc_op = CC_OP_ADDB + ot;
4260
        break;
4261
    case 0x1b0:
4262
    case 0x1b1: /* cmpxchg Ev, Gv */
4263
        {
4264
            int label1;
4265

    
4266
            if ((b & 1) == 0)
4267
                ot = OT_BYTE;
4268
            else
4269
                ot = dflag + OT_WORD;
4270
            modrm = ldub_code(s->pc++);
4271
            reg = ((modrm >> 3) & 7) | rex_r;
4272
            mod = (modrm >> 6) & 3;
4273
            gen_op_mov_TN_reg(ot, 1, reg);
4274
            if (mod == 3) {
4275
                rm = (modrm & 7) | REX_B(s);
4276
                gen_op_mov_TN_reg(ot, 0, rm);
4277
            } else {
4278
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4279
                gen_op_ld_T0_A0(ot + s->mem_index);
4280
                rm = 0; /* avoid warning */
4281
            }
4282
            label1 = gen_new_label();
4283
            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_EAX]));
4284
            tcg_gen_sub_tl(cpu_T3, cpu_T3, cpu_T[0]);
4285
            gen_extu(ot, cpu_T3);
4286
            tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);
4287
            tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
4288
            gen_op_mov_reg_T0(ot, R_EAX);
4289
            gen_set_label(label1);
4290
            if (mod == 3) {
4291
                gen_op_mov_reg_T1(ot, rm);
4292
            } else {
4293
                gen_op_st_T1_A0(ot + s->mem_index);
4294
            }
4295
            tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4296
            tcg_gen_mov_tl(cpu_cc_dst, cpu_T3);
4297
            s->cc_op = CC_OP_SUBB + ot;
4298
        }
4299
        break;
4300
    case 0x1c7: /* cmpxchg8b */
4301
        modrm = ldub_code(s->pc++);
4302
        mod = (modrm >> 6) & 3;
4303
        if ((mod == 3) || ((modrm & 0x38) != 0x8))
4304
            goto illegal_op;
4305
        gen_jmp_im(pc_start - s->cs_base);
4306
        if (s->cc_op != CC_OP_DYNAMIC)
4307
            gen_op_set_cc_op(s->cc_op);
4308
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4309
        tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
4310
        s->cc_op = CC_OP_EFLAGS;
4311
        break;
4312

    
4313
        /**************************/
4314
        /* push/pop */
4315
    case 0x50 ... 0x57: /* push */
4316
        gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
4317
        gen_push_T0(s);
4318
        break;
4319
    case 0x58 ... 0x5f: /* pop */
4320
        if (CODE64(s)) {
4321
            ot = dflag ? OT_QUAD : OT_WORD;
4322
        } else {
4323
            ot = dflag + OT_WORD;
4324
        }
4325
        gen_pop_T0(s);
4326
        /* NOTE: order is important for pop %sp */
4327
        gen_pop_update(s);
4328
        gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
4329
        break;
4330
    case 0x60: /* pusha */
4331
        if (CODE64(s))
4332
            goto illegal_op;
4333
        gen_pusha(s);
4334
        break;
4335
    case 0x61: /* popa */
4336
        if (CODE64(s))
4337
            goto illegal_op;
4338
        gen_popa(s);
4339
        break;
4340
    case 0x68: /* push Iv */
4341
    case 0x6a:
4342
        if (CODE64(s)) {
4343
            ot = dflag ? OT_QUAD : OT_WORD;
4344
        } else {
4345
            ot = dflag + OT_WORD;
4346
        }
4347
        if (b == 0x68)
4348
            val = insn_get(s, ot);
4349
        else
4350
            val = (int8_t)insn_get(s, OT_BYTE);
4351
        gen_op_movl_T0_im(val);
4352
        gen_push_T0(s);
4353
        break;
4354
    case 0x8f: /* pop Ev */
4355
        if (CODE64(s)) {
4356
            ot = dflag ? OT_QUAD : OT_WORD;
4357
        } else {
4358
            ot = dflag + OT_WORD;
4359
        }
4360
        modrm = ldub_code(s->pc++);
4361
        mod = (modrm >> 6) & 3;
4362
        gen_pop_T0(s);
4363
        if (mod == 3) {
4364
            /* NOTE: order is important for pop %sp */
4365
            gen_pop_update(s);
4366
            rm = (modrm & 7) | REX_B(s);
4367
            gen_op_mov_reg_T0(ot, rm);
4368
        } else {
4369
            /* NOTE: order is important too for MMU exceptions */
4370
            s->popl_esp_hack = 1 << ot;
4371
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4372
            s->popl_esp_hack = 0;
4373
            gen_pop_update(s);
4374
        }
4375
        break;
4376
    case 0xc8: /* enter */
4377
        {
4378
            int level;
4379
            val = lduw_code(s->pc);
4380
            s->pc += 2;
4381
            level = ldub_code(s->pc++);
4382
            gen_enter(s, val, level);
4383
        }
4384
        break;
4385
    case 0xc9: /* leave */
4386
        /* XXX: exception not precise (ESP is updated before potential exception) */
4387
        if (CODE64(s)) {
4388
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4389
            gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4390
        } else if (s->ss32) {
4391
            gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4392
            gen_op_mov_reg_T0(OT_LONG, R_ESP);
4393
        } else {
4394
            gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4395
            gen_op_mov_reg_T0(OT_WORD, R_ESP);
4396
        }
4397
        gen_pop_T0(s);
4398
        if (CODE64(s)) {
4399
            ot = dflag ? OT_QUAD : OT_WORD;
4400
        } else {
4401
            ot = dflag + OT_WORD;
4402
        }
4403
        gen_op_mov_reg_T0(ot, R_EBP);
4404
        gen_pop_update(s);
4405
        break;
4406
    case 0x06: /* push es */
4407
    case 0x0e: /* push cs */
4408
    case 0x16: /* push ss */
4409
    case 0x1e: /* push ds */
4410
        if (CODE64(s))
4411
            goto illegal_op;
4412
        gen_op_movl_T0_seg(b >> 3);
4413
        gen_push_T0(s);
4414
        break;
4415
    case 0x1a0: /* push fs */
4416
    case 0x1a8: /* push gs */
4417
        gen_op_movl_T0_seg((b >> 3) & 7);
4418
        gen_push_T0(s);
4419
        break;
4420
    case 0x07: /* pop es */
4421
    case 0x17: /* pop ss */
4422
    case 0x1f: /* pop ds */
4423
        if (CODE64(s))
4424
            goto illegal_op;
4425
        reg = b >> 3;
4426
        gen_pop_T0(s);
4427
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4428
        gen_pop_update(s);
4429
        if (reg == R_SS) {
4430
            /* if reg == SS, inhibit interrupts/trace. */
4431
            /* If several instructions disable interrupts, only the
4432
               _first_ does it */
4433
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4434
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
4435
            s->tf = 0;
4436
        }
4437
        if (s->is_jmp) {
4438
            gen_jmp_im(s->pc - s->cs_base);
4439
            gen_eob(s);
4440
        }
4441
        break;
4442
    case 0x1a1: /* pop fs */
4443
    case 0x1a9: /* pop gs */
4444
        gen_pop_T0(s);
4445
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4446
        gen_pop_update(s);
4447
        if (s->is_jmp) {
4448
            gen_jmp_im(s->pc - s->cs_base);
4449
            gen_eob(s);
4450
        }
4451
        break;
4452

    
4453
        /**************************/
4454
        /* mov */
4455
    case 0x88:
4456
    case 0x89: /* mov Gv, Ev */
4457
        if ((b & 1) == 0)
4458
            ot = OT_BYTE;
4459
        else
4460
            ot = dflag + OT_WORD;
4461
        modrm = ldub_code(s->pc++);
4462
        reg = ((modrm >> 3) & 7) | rex_r;
4463

    
4464
        /* generate a generic store */
4465
        gen_ldst_modrm(s, modrm, ot, reg, 1);
4466
        break;
4467
    case 0xc6:
4468
    case 0xc7: /* mov Ev, Iv */
4469
        if ((b & 1) == 0)
4470
            ot = OT_BYTE;
4471
        else
4472
            ot = dflag + OT_WORD;
4473
        modrm = ldub_code(s->pc++);
4474
        mod = (modrm >> 6) & 3;
4475
        if (mod != 3) {
4476
            s->rip_offset = insn_const_size(ot);
4477
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4478
        }
4479
        val = insn_get(s, ot);
4480
        gen_op_movl_T0_im(val);
4481
        if (mod != 3)
4482
            gen_op_st_T0_A0(ot + s->mem_index);
4483
        else
4484
            gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4485
        break;
4486
    case 0x8a:
4487
    case 0x8b: /* mov Ev, Gv */
4488
        if ((b & 1) == 0)
4489
            ot = OT_BYTE;
4490
        else
4491
            ot = OT_WORD + dflag;
4492
        modrm = ldub_code(s->pc++);
4493
        reg = ((modrm >> 3) & 7) | rex_r;
4494

    
4495
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4496
        gen_op_mov_reg_T0(ot, reg);
4497
        break;
4498
    case 0x8e: /* mov seg, Gv */
4499
        modrm = ldub_code(s->pc++);
4500
        reg = (modrm >> 3) & 7;
4501
        if (reg >= 6 || reg == R_CS)
4502
            goto illegal_op;
4503
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4504
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4505
        if (reg == R_SS) {
4506
            /* if reg == SS, inhibit interrupts/trace */
4507
            /* If several instructions disable interrupts, only the
4508
               _first_ does it */
4509
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4510
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
4511
            s->tf = 0;
4512
        }
4513
        if (s->is_jmp) {
4514
            gen_jmp_im(s->pc - s->cs_base);
4515
            gen_eob(s);
4516
        }
4517
        break;
4518
    case 0x8c: /* mov Gv, seg */
4519
        modrm = ldub_code(s->pc++);
4520
        reg = (modrm >> 3) & 7;
4521
        mod = (modrm >> 6) & 3;
4522
        if (reg >= 6)
4523
            goto illegal_op;
4524
        gen_op_movl_T0_seg(reg);
4525
        if (mod == 3)
4526
            ot = OT_WORD + dflag;
4527
        else
4528
            ot = OT_WORD;
4529
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4530
        break;
4531

    
4532
    case 0x1b6: /* movzbS Gv, Eb */
4533
    case 0x1b7: /* movzwS Gv, Eb */
4534
    case 0x1be: /* movsbS Gv, Eb */
4535
    case 0x1bf: /* movswS Gv, Eb */
4536
        {
4537
            int d_ot;
4538
            /* d_ot is the size of destination */
4539
            d_ot = dflag + OT_WORD;
4540
            /* ot is the size of source */
4541
            ot = (b & 1) + OT_BYTE;
4542
            modrm = ldub_code(s->pc++);
4543
            reg = ((modrm >> 3) & 7) | rex_r;
4544
            mod = (modrm >> 6) & 3;
4545
            rm = (modrm & 7) | REX_B(s);
4546

    
4547
            if (mod == 3) {
4548
                gen_op_mov_TN_reg(ot, 0, rm);
4549
                switch(ot | (b & 8)) {
4550
                case OT_BYTE:
4551
                    tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4552
                    break;
4553
                case OT_BYTE | 8:
4554
                    tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4555
                    break;
4556
                case OT_WORD:
4557
                    tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4558
                    break;
4559
                default:
4560
                case OT_WORD | 8:
4561
                    tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4562
                    break;
4563
                }
4564
                gen_op_mov_reg_T0(d_ot, reg);
4565
            } else {
4566
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4567
                if (b & 8) {
4568
                    gen_op_lds_T0_A0(ot + s->mem_index);
4569
                } else {
4570
                    gen_op_ldu_T0_A0(ot + s->mem_index);
4571
                }
4572
                gen_op_mov_reg_T0(d_ot, reg);
4573
            }
4574
        }
4575
        break;
4576

    
4577
    case 0x8d: /* lea */
4578
        ot = dflag + OT_WORD;
4579
        modrm = ldub_code(s->pc++);
4580
        mod = (modrm >> 6) & 3;
4581
        if (mod == 3)
4582
            goto illegal_op;
4583
        reg = ((modrm >> 3) & 7) | rex_r;
4584
        /* we must ensure that no segment is added */
4585
        s->override = -1;
4586
        val = s->addseg;
4587
        s->addseg = 0;
4588
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4589
        s->addseg = val;
4590
        gen_op_mov_reg_A0(ot - OT_WORD, reg);
4591
        break;
4592

    
4593
    case 0xa0: /* mov EAX, Ov */
4594
    case 0xa1:
4595
    case 0xa2: /* mov Ov, EAX */
4596
    case 0xa3:
4597
        {
4598
            target_ulong offset_addr;
4599

    
4600
            if ((b & 1) == 0)
4601
                ot = OT_BYTE;
4602
            else
4603
                ot = dflag + OT_WORD;
4604
#ifdef TARGET_X86_64
4605
            if (s->aflag == 2) {
4606
                offset_addr = ldq_code(s->pc);
4607
                s->pc += 8;
4608
                gen_op_movq_A0_im(offset_addr);
4609
            } else
4610
#endif
4611
            {
4612
                if (s->aflag) {
4613
                    offset_addr = insn_get(s, OT_LONG);
4614
                } else {
4615
                    offset_addr = insn_get(s, OT_WORD);
4616
                }
4617
                gen_op_movl_A0_im(offset_addr);
4618
            }
4619
            gen_add_A0_ds_seg(s);
4620
            if ((b & 2) == 0) {
4621
                gen_op_ld_T0_A0(ot + s->mem_index);
4622
                gen_op_mov_reg_T0(ot, R_EAX);
4623
            } else {
4624
                gen_op_mov_TN_reg(ot, 0, R_EAX);
4625
                gen_op_st_T0_A0(ot + s->mem_index);
4626
            }
4627
        }
4628
        break;
4629
    case 0xd7: /* xlat */
4630
#ifdef TARGET_X86_64
4631
        if (s->aflag == 2) {
4632
            gen_op_movq_A0_reg(R_EBX);
4633
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4634
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4635
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4636
        } else
4637
#endif
4638
        {
4639
            gen_op_movl_A0_reg(R_EBX);
4640
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4641
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4642
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4643
            if (s->aflag == 0)
4644
                gen_op_andl_A0_ffff();
4645
            else
4646
                tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
4647
        }
4648
        gen_add_A0_ds_seg(s);
4649
        gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4650
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4651
        break;
4652
    case 0xb0 ... 0xb7: /* mov R, Ib */
4653
        val = insn_get(s, OT_BYTE);
4654
        gen_op_movl_T0_im(val);
4655
        gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4656
        break;
4657
    case 0xb8 ... 0xbf: /* mov R, Iv */
4658
#ifdef TARGET_X86_64
4659
        if (dflag == 2) {
4660
            uint64_t tmp;
4661
            /* 64 bit case */
4662
            tmp = ldq_code(s->pc);
4663
            s->pc += 8;
4664
            reg = (b & 7) | REX_B(s);
4665
            gen_movtl_T0_im(tmp);
4666
            gen_op_mov_reg_T0(OT_QUAD, reg);
4667
        } else
4668
#endif
4669
        {
4670
            ot = dflag ? OT_LONG : OT_WORD;
4671
            val = insn_get(s, ot);
4672
            reg = (b & 7) | REX_B(s);
4673
            gen_op_movl_T0_im(val);
4674
            gen_op_mov_reg_T0(ot, reg);
4675
        }
4676
        break;
4677

    
4678
    case 0x91 ... 0x97: /* xchg R, EAX */
4679
        ot = dflag + OT_WORD;
4680
        reg = (b & 7) | REX_B(s);
4681
        rm = R_EAX;
4682
        goto do_xchg_reg;
4683
    case 0x86:
4684
    case 0x87: /* xchg Ev, Gv */
4685
        if ((b & 1) == 0)
4686
            ot = OT_BYTE;
4687
        else
4688
            ot = dflag + OT_WORD;
4689
        modrm = ldub_code(s->pc++);
4690
        reg = ((modrm >> 3) & 7) | rex_r;
4691
        mod = (modrm >> 6) & 3;
4692
        if (mod == 3) {
4693
            rm = (modrm & 7) | REX_B(s);
4694
        do_xchg_reg:
4695
            gen_op_mov_TN_reg(ot, 0, reg);
4696
            gen_op_mov_TN_reg(ot, 1, rm);
4697
            gen_op_mov_reg_T0(ot, rm);
4698
            gen_op_mov_reg_T1(ot, reg);
4699
        } else {
4700
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4701
            gen_op_mov_TN_reg(ot, 0, reg);
4702
            /* for xchg, lock is implicit */
4703
            if (!(prefixes & PREFIX_LOCK))
4704
                tcg_gen_helper_0_0(helper_lock);
4705
            gen_op_ld_T1_A0(ot + s->mem_index);
4706
            gen_op_st_T0_A0(ot + s->mem_index);
4707
            if (!(prefixes & PREFIX_LOCK))
4708
                tcg_gen_helper_0_0(helper_unlock);
4709
            gen_op_mov_reg_T1(ot, reg);
4710
        }
4711
        break;
4712
    case 0xc4: /* les Gv */
4713
        if (CODE64(s))
4714
            goto illegal_op;
4715
        op = R_ES;
4716
        goto do_lxx;
4717
    case 0xc5: /* lds Gv */
4718
        if (CODE64(s))
4719
            goto illegal_op;
4720
        op = R_DS;
4721
        goto do_lxx;
4722
    case 0x1b2: /* lss Gv */
4723
        op = R_SS;
4724
        goto do_lxx;
4725
    case 0x1b4: /* lfs Gv */
4726
        op = R_FS;
4727
        goto do_lxx;
4728
    case 0x1b5: /* lgs Gv */
4729
        op = R_GS;
4730
    do_lxx:
4731
        ot = dflag ? OT_LONG : OT_WORD;
4732
        modrm = ldub_code(s->pc++);
4733
        reg = ((modrm >> 3) & 7) | rex_r;
4734
        mod = (modrm >> 6) & 3;
4735
        if (mod == 3)
4736
            goto illegal_op;
4737
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4738
        gen_op_ld_T1_A0(ot + s->mem_index);
4739
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4740
        /* load the segment first to handle exceptions properly */
4741
        gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4742
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4743
        /* then put the data */
4744
        gen_op_mov_reg_T1(ot, reg);
4745
        if (s->is_jmp) {
4746
            gen_jmp_im(s->pc - s->cs_base);
4747
            gen_eob(s);
4748
        }
4749
        break;
4750

    
4751
        /************************/
4752
        /* shifts */
4753
    case 0xc0:
4754
    case 0xc1:
4755
        /* shift Ev,Ib */
4756
        shift = 2;
4757
    grp2:
4758
        {
4759
            if ((b & 1) == 0)
4760
                ot = OT_BYTE;
4761
            else
4762
                ot = dflag + OT_WORD;
4763

    
4764
            modrm = ldub_code(s->pc++);
4765
            mod = (modrm >> 6) & 3;
4766
            op = (modrm >> 3) & 7;
4767

    
4768
            if (mod != 3) {
4769
                if (shift == 2) {
4770
                    s->rip_offset = 1;
4771
                }
4772
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4773
                opreg = OR_TMP0;
4774
            } else {
4775
                opreg = (modrm & 7) | REX_B(s);
4776
            }
4777

    
4778
            /* simpler op */
4779
            if (shift == 0) {
4780
                gen_shift(s, op, ot, opreg, OR_ECX);
4781
            } else {
4782
                if (shift == 2) {
4783
                    shift = ldub_code(s->pc++);
4784
                }
4785
                gen_shifti(s, op, ot, opreg, shift);
4786
            }
4787
        }
4788
        break;
4789
    case 0xd0:
4790
    case 0xd1:
4791
        /* shift Ev,1 */
4792
        shift = 1;
4793
        goto grp2;
4794
    case 0xd2:
4795
    case 0xd3:
4796
        /* shift Ev,cl */
4797
        shift = 0;
4798
        goto grp2;
4799

    
4800
    case 0x1a4: /* shld imm */
4801
        op = 0;
4802
        shift = 1;
4803
        goto do_shiftd;
4804
    case 0x1a5: /* shld cl */
4805
        op = 0;
4806
        shift = 0;
4807
        goto do_shiftd;
4808
    case 0x1ac: /* shrd imm */
4809
        op = 1;
4810
        shift = 1;
4811
        goto do_shiftd;
4812
    case 0x1ad: /* shrd cl */
4813
        op = 1;
4814
        shift = 0;
4815
    do_shiftd:
4816
        ot = dflag + OT_WORD;
4817
        modrm = ldub_code(s->pc++);
4818
        mod = (modrm >> 6) & 3;
4819
        rm = (modrm & 7) | REX_B(s);
4820
        reg = ((modrm >> 3) & 7) | rex_r;
4821
        if (mod != 3) {
4822
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4823
            opreg = OR_TMP0;
4824
        } else {
4825
            opreg = rm;
4826
        }
4827
        gen_op_mov_TN_reg(ot, 1, reg);
4828

    
4829
        if (shift) {
4830
            val = ldub_code(s->pc++);
4831
            tcg_gen_movi_tl(cpu_T3, val);
4832
        } else {
4833
            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
4834
        }
4835
        gen_shiftd_rm_T1_T3(s, ot, opreg, op);
4836
        break;
4837

    
4838
        /************************/
4839
        /* floats */
4840
    case 0xd8 ... 0xdf:
4841
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4842
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4843
            /* XXX: what to do if illegal op ? */
4844
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4845
            break;
4846
        }
4847
        modrm = ldub_code(s->pc++);
4848
        mod = (modrm >> 6) & 3;
4849
        rm = modrm & 7;
4850
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4851
        if (mod != 3) {
4852
            /* memory op */
4853
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4854
            switch(op) {
4855
            case 0x00 ... 0x07: /* fxxxs */
4856
            case 0x10 ... 0x17: /* fixxxl */
4857
            case 0x20 ... 0x27: /* fxxxl */
4858
            case 0x30 ... 0x37: /* fixxx */
4859
                {
4860
                    int op1;
4861
                    op1 = op & 7;
4862

    
4863
                    switch(op >> 4) {
4864
                    case 0:
4865
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4866
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4867
                        tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
4868
                        break;
4869
                    case 1:
4870
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4871
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4872
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4873
                        break;
4874
                    case 2:
4875
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
4876
                                          (s->mem_index >> 2) - 1);
4877
                        tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
4878
                        break;
4879
                    case 3:
4880
                    default:
4881
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4882
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4883
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4884
                        break;
4885
                    }
4886

    
4887
                    tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
4888
                    if (op1 == 3) {
4889
                        /* fcomp needs pop */
4890
                        tcg_gen_helper_0_0(helper_fpop);
4891
                    }
4892
                }
4893
                break;
4894
            case 0x08: /* flds */
4895
            case 0x0a: /* fsts */
4896
            case 0x0b: /* fstps */
4897
            case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4898
            case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4899
            case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4900
                switch(op & 7) {
4901
                case 0:
4902
                    switch(op >> 4) {
4903
                    case 0:
4904
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4905
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4906
                        tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
4907
                        break;
4908
                    case 1:
4909
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4910
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4911
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
4912
                        break;
4913
                    case 2:
4914
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
4915
                                          (s->mem_index >> 2) - 1);
4916
                        tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
4917
                        break;
4918
                    case 3:
4919
                    default:
4920
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4921
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4922
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
4923
                        break;
4924
                    }
4925
                    break;
4926
                case 1:
4927
                    /* XXX: the corresponding CPUID bit must be tested ! */
4928
                    switch(op >> 4) {
4929
                    case 1:
4930
                        tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
4931
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4932
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4933
                        break;
4934
                    case 2:
4935
                        tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
4936
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
4937
                                          (s->mem_index >> 2) - 1);
4938
                        break;
4939
                    case 3:
4940
                    default:
4941
                        tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
4942
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4943
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
4944
                        break;
4945
                    }
4946
                    tcg_gen_helper_0_0(helper_fpop);
4947
                    break;
4948
                default:
4949
                    switch(op >> 4) {
4950
                    case 0:
4951
                        tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
4952
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4953
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4954
                        break;
4955
                    case 1:
4956
                        tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
4957
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4958
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4959
                        break;
4960
                    case 2:
4961
                        tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
4962
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
4963
                                          (s->mem_index >> 2) - 1);
4964
                        break;
4965
                    case 3:
4966
                    default:
4967
                        tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
4968
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4969
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
4970
                        break;
4971
                    }
4972
                    if ((op & 7) == 3)
4973
                        tcg_gen_helper_0_0(helper_fpop);
4974
                    break;
4975
                }
4976
                break;
4977
            case 0x0c: /* fldenv mem */
4978
                if (s->cc_op != CC_OP_DYNAMIC)
4979
                    gen_op_set_cc_op(s->cc_op);
4980
                gen_jmp_im(pc_start - s->cs_base);
4981
                tcg_gen_helper_0_2(helper_fldenv, 
4982
                                   cpu_A0, tcg_const_i32(s->dflag));
4983
                break;
4984
            case 0x0d: /* fldcw mem */
4985
                gen_op_ld_T0_A0(OT_WORD + s->mem_index);
4986
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4987
                tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
4988
                break;
4989
            case 0x0e: /* fnstenv mem */
4990
                if (s->cc_op != CC_OP_DYNAMIC)
4991
                    gen_op_set_cc_op(s->cc_op);
4992
                gen_jmp_im(pc_start - s->cs_base);
4993
                tcg_gen_helper_0_2(helper_fstenv,
4994
                                   cpu_A0, tcg_const_i32(s->dflag));
4995
                break;
4996
            case 0x0f: /* fnstcw mem */
4997
                tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
4998
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4999
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
5000
                break;
5001
            case 0x1d: /* fldt mem */
5002
                if (s->cc_op != CC_OP_DYNAMIC)
5003
                    gen_op_set_cc_op(s->cc_op);
5004
                gen_jmp_im(pc_start - s->cs_base);
5005
                tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
5006
                break;
5007
            case 0x1f: /* fstpt mem */
5008
                if (s->cc_op != CC_OP_DYNAMIC)
5009
                    gen_op_set_cc_op(s->cc_op);
5010
                gen_jmp_im(pc_start - s->cs_base);
5011
                tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
5012
                tcg_gen_helper_0_0(helper_fpop);
5013
                break;
5014
            case 0x2c: /* frstor mem */
5015
                if (s->cc_op != CC_OP_DYNAMIC)
5016
                    gen_op_set_cc_op(s->cc_op);
5017
                gen_jmp_im(pc_start - s->cs_base);
5018
                tcg_gen_helper_0_2(helper_frstor,
5019
                                   cpu_A0, tcg_const_i32(s->dflag));
5020
                break;
5021
            case 0x2e: /* fnsave mem */
5022
                if (s->cc_op != CC_OP_DYNAMIC)
5023
                    gen_op_set_cc_op(s->cc_op);
5024
                gen_jmp_im(pc_start - s->cs_base);
5025
                tcg_gen_helper_0_2(helper_fsave,
5026
                                   cpu_A0, tcg_const_i32(s->dflag));
5027
                break;
5028
            case 0x2f: /* fnstsw mem */
5029
                tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5030
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5031
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
5032
                break;
5033
            case 0x3c: /* fbld */
5034
                if (s->cc_op != CC_OP_DYNAMIC)
5035
                    gen_op_set_cc_op(s->cc_op);
5036
                gen_jmp_im(pc_start - s->cs_base);
5037
                tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
5038
                break;
5039
            case 0x3e: /* fbstp */
5040
                if (s->cc_op != CC_OP_DYNAMIC)
5041
                    gen_op_set_cc_op(s->cc_op);
5042
                gen_jmp_im(pc_start - s->cs_base);
5043
                tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
5044
                tcg_gen_helper_0_0(helper_fpop);
5045
                break;
5046
            case 0x3d: /* fildll */
5047
                tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
5048
                                  (s->mem_index >> 2) - 1);
5049
                tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
5050
                break;
5051
            case 0x3f: /* fistpll */
5052
                tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
5053
                tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
5054
                                  (s->mem_index >> 2) - 1);
5055
                tcg_gen_helper_0_0(helper_fpop);
5056
                break;
5057
            default:
5058
                goto illegal_op;
5059
            }
5060
        } else {
5061
            /* register float ops */
5062
            opreg = rm;
5063

    
5064
            switch(op) {
5065
            case 0x08: /* fld sti */
5066
                tcg_gen_helper_0_0(helper_fpush);
5067
                tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
5068
                break;
5069
            case 0x09: /* fxchg sti */
5070
            case 0x29: /* fxchg4 sti, undocumented op */
5071
            case 0x39: /* fxchg7 sti, undocumented op */
5072
                tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
5073
                break;
5074
            case 0x0a: /* grp d9/2 */
5075
                switch(rm) {
5076
                case 0: /* fnop */
5077
                    /* check exceptions (FreeBSD FPU probe) */
5078
                    if (s->cc_op != CC_OP_DYNAMIC)
5079
                        gen_op_set_cc_op(s->cc_op);
5080
                    gen_jmp_im(pc_start - s->cs_base);
5081
                    tcg_gen_helper_0_0(helper_fwait);
5082
                    break;
5083
                default:
5084
                    goto illegal_op;
5085
                }
5086
                break;
5087
            case 0x0c: /* grp d9/4 */
5088
                switch(rm) {
5089
                case 0: /* fchs */
5090
                    tcg_gen_helper_0_0(helper_fchs_ST0);
5091
                    break;
5092
                case 1: /* fabs */
5093
                    tcg_gen_helper_0_0(helper_fabs_ST0);
5094
                    break;
5095
                case 4: /* ftst */
5096
                    tcg_gen_helper_0_0(helper_fldz_FT0);
5097
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5098
                    break;
5099
                case 5: /* fxam */
5100
                    tcg_gen_helper_0_0(helper_fxam_ST0);
5101
                    break;
5102
                default:
5103
                    goto illegal_op;
5104
                }
5105
                break;
5106
            case 0x0d: /* grp d9/5 */
5107
                {
5108
                    switch(rm) {
5109
                    case 0:
5110
                        tcg_gen_helper_0_0(helper_fpush);
5111
                        tcg_gen_helper_0_0(helper_fld1_ST0);
5112
                        break;
5113
                    case 1:
5114
                        tcg_gen_helper_0_0(helper_fpush);
5115
                        tcg_gen_helper_0_0(helper_fldl2t_ST0);
5116
                        break;
5117
                    case 2:
5118
                        tcg_gen_helper_0_0(helper_fpush);
5119
                        tcg_gen_helper_0_0(helper_fldl2e_ST0);
5120
                        break;
5121
                    case 3:
5122
                        tcg_gen_helper_0_0(helper_fpush);
5123
                        tcg_gen_helper_0_0(helper_fldpi_ST0);
5124
                        break;
5125
                    case 4:
5126
                        tcg_gen_helper_0_0(helper_fpush);
5127
                        tcg_gen_helper_0_0(helper_fldlg2_ST0);
5128
                        break;
5129
                    case 5:
5130
                        tcg_gen_helper_0_0(helper_fpush);
5131
                        tcg_gen_helper_0_0(helper_fldln2_ST0);
5132
                        break;
5133
                    case 6:
5134
                        tcg_gen_helper_0_0(helper_fpush);
5135
                        tcg_gen_helper_0_0(helper_fldz_ST0);
5136
                        break;
5137
                    default:
5138
                        goto illegal_op;
5139
                    }
5140
                }
5141
                break;
5142
            case 0x0e: /* grp d9/6 */
5143
                switch(rm) {
5144
                case 0: /* f2xm1 */
5145
                    tcg_gen_helper_0_0(helper_f2xm1);
5146
                    break;
5147
                case 1: /* fyl2x */
5148
                    tcg_gen_helper_0_0(helper_fyl2x);
5149
                    break;
5150
                case 2: /* fptan */
5151
                    tcg_gen_helper_0_0(helper_fptan);
5152
                    break;
5153
                case 3: /* fpatan */
5154
                    tcg_gen_helper_0_0(helper_fpatan);
5155
                    break;
5156
                case 4: /* fxtract */
5157
                    tcg_gen_helper_0_0(helper_fxtract);
5158
                    break;
5159
                case 5: /* fprem1 */
5160
                    tcg_gen_helper_0_0(helper_fprem1);
5161
                    break;
5162
                case 6: /* fdecstp */
5163
                    tcg_gen_helper_0_0(helper_fdecstp);
5164
                    break;
5165
                default:
5166
                case 7: /* fincstp */
5167
                    tcg_gen_helper_0_0(helper_fincstp);
5168
                    break;
5169
                }
5170
                break;
5171
            case 0x0f: /* grp d9/7 */
5172
                switch(rm) {
5173
                case 0: /* fprem */
5174
                    tcg_gen_helper_0_0(helper_fprem);
5175
                    break;
5176
                case 1: /* fyl2xp1 */
5177
                    tcg_gen_helper_0_0(helper_fyl2xp1);
5178
                    break;
5179
                case 2: /* fsqrt */
5180
                    tcg_gen_helper_0_0(helper_fsqrt);
5181
                    break;
5182
                case 3: /* fsincos */
5183
                    tcg_gen_helper_0_0(helper_fsincos);
5184
                    break;
5185
                case 5: /* fscale */
5186
                    tcg_gen_helper_0_0(helper_fscale);
5187
                    break;
5188
                case 4: /* frndint */
5189
                    tcg_gen_helper_0_0(helper_frndint);
5190
                    break;
5191
                case 6: /* fsin */
5192
                    tcg_gen_helper_0_0(helper_fsin);
5193
                    break;
5194
                default:
5195
                case 7: /* fcos */
5196
                    tcg_gen_helper_0_0(helper_fcos);
5197
                    break;
5198
                }
5199
                break;
5200
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
5201
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
5202
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
5203
                {
5204
                    int op1;
5205

    
5206
                    op1 = op & 7;
5207
                    if (op >= 0x20) {
5208
                        tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
5209
                        if (op >= 0x30)
5210
                            tcg_gen_helper_0_0(helper_fpop);
5211
                    } else {
5212
                        tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5213
                        tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
5214
                    }
5215
                }
5216
                break;
5217
            case 0x02: /* fcom */
5218
            case 0x22: /* fcom2, undocumented op */
5219
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5220
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5221
                break;
5222
            case 0x03: /* fcomp */
5223
            case 0x23: /* fcomp3, undocumented op */
5224
            case 0x32: /* fcomp5, undocumented op */
5225
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5226
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5227
                tcg_gen_helper_0_0(helper_fpop);
5228
                break;
5229
            case 0x15: /* da/5 */
5230
                switch(rm) {
5231
                case 1: /* fucompp */
5232
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5233
                    tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5234
                    tcg_gen_helper_0_0(helper_fpop);
5235
                    tcg_gen_helper_0_0(helper_fpop);
5236
                    break;
5237
                default:
5238
                    goto illegal_op;
5239
                }
5240
                break;
5241
            case 0x1c:
5242
                switch(rm) {
5243
                case 0: /* feni (287 only, just do nop here) */
5244
                    break;
5245
                case 1: /* fdisi (287 only, just do nop here) */
5246
                    break;
5247
                case 2: /* fclex */
5248
                    tcg_gen_helper_0_0(helper_fclex);
5249
                    break;
5250
                case 3: /* fninit */
5251
                    tcg_gen_helper_0_0(helper_fninit);
5252
                    break;
5253
                case 4: /* fsetpm (287 only, just do nop here) */
5254
                    break;
5255
                default:
5256
                    goto illegal_op;
5257
                }
5258
                break;
5259
            case 0x1d: /* fucomi */
5260
                if (s->cc_op != CC_OP_DYNAMIC)
5261
                    gen_op_set_cc_op(s->cc_op);
5262
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5263
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5264
                s->cc_op = CC_OP_EFLAGS;
5265
                break;
5266
            case 0x1e: /* fcomi */
5267
                if (s->cc_op != CC_OP_DYNAMIC)
5268
                    gen_op_set_cc_op(s->cc_op);
5269
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5270
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5271
                s->cc_op = CC_OP_EFLAGS;
5272
                break;
5273
            case 0x28: /* ffree sti */
5274
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5275
                break;
5276
            case 0x2a: /* fst sti */
5277
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5278
                break;
5279
            case 0x2b: /* fstp sti */
5280
            case 0x0b: /* fstp1 sti, undocumented op */
5281
            case 0x3a: /* fstp8 sti, undocumented op */
5282
            case 0x3b: /* fstp9 sti, undocumented op */
5283
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5284
                tcg_gen_helper_0_0(helper_fpop);
5285
                break;
5286
            case 0x2c: /* fucom st(i) */
5287
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5288
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5289
                break;
5290
            case 0x2d: /* fucomp st(i) */
5291
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5292
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5293
                tcg_gen_helper_0_0(helper_fpop);
5294
                break;
5295
            case 0x33: /* de/3 */
5296
                switch(rm) {
5297
                case 1: /* fcompp */
5298
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5299
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5300
                    tcg_gen_helper_0_0(helper_fpop);
5301
                    tcg_gen_helper_0_0(helper_fpop);
5302
                    break;
5303
                default:
5304
                    goto illegal_op;
5305
                }
5306
                break;
5307
            case 0x38: /* ffreep sti, undocumented op */
5308
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5309
                tcg_gen_helper_0_0(helper_fpop);
5310
                break;
5311
            case 0x3c: /* df/4 */
5312
                switch(rm) {
5313
                case 0:
5314
                    tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5315
                    tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5316
                    gen_op_mov_reg_T0(OT_WORD, R_EAX);
5317
                    break;
5318
                default:
5319
                    goto illegal_op;
5320
                }
5321
                break;
5322
            case 0x3d: /* fucomip */
5323
                if (s->cc_op != CC_OP_DYNAMIC)
5324
                    gen_op_set_cc_op(s->cc_op);
5325
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5326
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5327
                tcg_gen_helper_0_0(helper_fpop);
5328
                s->cc_op = CC_OP_EFLAGS;
5329
                break;
5330
            case 0x3e: /* fcomip */
5331
                if (s->cc_op != CC_OP_DYNAMIC)
5332
                    gen_op_set_cc_op(s->cc_op);
5333
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5334
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5335
                tcg_gen_helper_0_0(helper_fpop);
5336
                s->cc_op = CC_OP_EFLAGS;
5337
                break;
5338
            case 0x10 ... 0x13: /* fcmovxx */
5339
            case 0x18 ... 0x1b:
5340
                {
5341
                    int op1, l1;
5342
                    const static uint8_t fcmov_cc[8] = {
5343
                        (JCC_B << 1),
5344
                        (JCC_Z << 1),
5345
                        (JCC_BE << 1),
5346
                        (JCC_P << 1),
5347
                    };
5348
                    op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
5349
                    gen_setcc(s, op1);
5350
                    l1 = gen_new_label();
5351
                    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), l1);
5352
                    tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
5353
                    gen_set_label(l1);
5354
                }
5355
                break;
5356
            default:
5357
                goto illegal_op;
5358
            }
5359
        }
5360
        break;
5361
        /************************/
5362
        /* string ops */
5363

    
5364
    case 0xa4: /* movsS */
5365
    case 0xa5:
5366
        if ((b & 1) == 0)
5367
            ot = OT_BYTE;
5368
        else
5369
            ot = dflag + OT_WORD;
5370

    
5371
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5372
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5373
        } else {
5374
            gen_movs(s, ot);
5375
        }
5376
        break;
5377

    
5378
    case 0xaa: /* stosS */
5379
    case 0xab:
5380
        if ((b & 1) == 0)
5381
            ot = OT_BYTE;
5382
        else
5383
            ot = dflag + OT_WORD;
5384

    
5385
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5386
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5387
        } else {
5388
            gen_stos(s, ot);
5389
        }
5390
        break;
5391
    case 0xac: /* lodsS */
5392
    case 0xad:
5393
        if ((b & 1) == 0)
5394
            ot = OT_BYTE;
5395
        else
5396
            ot = dflag + OT_WORD;
5397
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5398
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5399
        } else {
5400
            gen_lods(s, ot);
5401
        }
5402
        break;
5403
    case 0xae: /* scasS */
5404
    case 0xaf:
5405
        if ((b & 1) == 0)
5406
            ot = OT_BYTE;
5407
        else
5408
            ot = dflag + OT_WORD;
5409
        if (prefixes & PREFIX_REPNZ) {
5410
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5411
        } else if (prefixes & PREFIX_REPZ) {
5412
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5413
        } else {
5414
            gen_scas(s, ot);
5415
            s->cc_op = CC_OP_SUBB + ot;
5416
        }
5417
        break;
5418

    
5419
    case 0xa6: /* cmpsS */
5420
    case 0xa7:
5421
        if ((b & 1) == 0)
5422
            ot = OT_BYTE;
5423
        else
5424
            ot = dflag + OT_WORD;
5425
        if (prefixes & PREFIX_REPNZ) {
5426
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5427
        } else if (prefixes & PREFIX_REPZ) {
5428
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5429
        } else {
5430
            gen_cmps(s, ot);
5431
            s->cc_op = CC_OP_SUBB + ot;
5432
        }
5433
        break;
5434
    case 0x6c: /* insS */
5435
    case 0x6d:
5436
        if ((b & 1) == 0)
5437
            ot = OT_BYTE;
5438
        else
5439
            ot = dflag ? OT_LONG : OT_WORD;
5440
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5441
        gen_op_andl_T0_ffff();
5442
        gen_check_io(s, ot, pc_start - s->cs_base, 
5443
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
5444
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5445
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5446
        } else {
5447
            gen_ins(s, ot);
5448
        }
5449
        break;
5450
    case 0x6e: /* outsS */
5451
    case 0x6f:
5452
        if ((b & 1) == 0)
5453
            ot = OT_BYTE;
5454
        else
5455
            ot = dflag ? OT_LONG : OT_WORD;
5456
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5457
        gen_op_andl_T0_ffff();
5458
        gen_check_io(s, ot, pc_start - s->cs_base,
5459
                     svm_is_rep(prefixes) | 4);
5460
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5461
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5462
        } else {
5463
            gen_outs(s, ot);
5464
        }
5465
        break;
5466

    
5467
        /************************/
5468
        /* port I/O */
5469

    
5470
    case 0xe4:
5471
    case 0xe5:
5472
        if ((b & 1) == 0)
5473
            ot = OT_BYTE;
5474
        else
5475
            ot = dflag ? OT_LONG : OT_WORD;
5476
        val = ldub_code(s->pc++);
5477
        gen_op_movl_T0_im(val);
5478
        gen_check_io(s, ot, pc_start - s->cs_base,
5479
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5480
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5481
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5482
        gen_op_mov_reg_T1(ot, R_EAX);
5483
        break;
5484
    case 0xe6:
5485
    case 0xe7:
5486
        if ((b & 1) == 0)
5487
            ot = OT_BYTE;
5488
        else
5489
            ot = dflag ? OT_LONG : OT_WORD;
5490
        val = ldub_code(s->pc++);
5491
        gen_op_movl_T0_im(val);
5492
        gen_check_io(s, ot, pc_start - s->cs_base,
5493
                     svm_is_rep(prefixes));
5494
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5495

    
5496
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5497
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5498
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5499
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5500
        break;
5501
    case 0xec:
5502
    case 0xed:
5503
        if ((b & 1) == 0)
5504
            ot = OT_BYTE;
5505
        else
5506
            ot = dflag ? OT_LONG : OT_WORD;
5507
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5508
        gen_op_andl_T0_ffff();
5509
        gen_check_io(s, ot, pc_start - s->cs_base,
5510
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5511
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5512
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5513
        gen_op_mov_reg_T1(ot, R_EAX);
5514
        break;
5515
    case 0xee:
5516
    case 0xef:
5517
        if ((b & 1) == 0)
5518
            ot = OT_BYTE;
5519
        else
5520
            ot = dflag ? OT_LONG : OT_WORD;
5521
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5522
        gen_op_andl_T0_ffff();
5523
        gen_check_io(s, ot, pc_start - s->cs_base,
5524
                     svm_is_rep(prefixes));
5525
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5526

    
5527
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5528
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5529
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5530
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5531
        break;
5532

    
5533
        /************************/
5534
        /* control */
5535
    case 0xc2: /* ret im */
5536
        val = ldsw_code(s->pc);
5537
        s->pc += 2;
5538
        gen_pop_T0(s);
5539
        if (CODE64(s) && s->dflag)
5540
            s->dflag = 2;
5541
        gen_stack_update(s, val + (2 << s->dflag));
5542
        if (s->dflag == 0)
5543
            gen_op_andl_T0_ffff();
5544
        gen_op_jmp_T0();
5545
        gen_eob(s);
5546
        break;
5547
    case 0xc3: /* ret */
5548
        gen_pop_T0(s);
5549
        gen_pop_update(s);
5550
        if (s->dflag == 0)
5551
            gen_op_andl_T0_ffff();
5552
        gen_op_jmp_T0();
5553
        gen_eob(s);
5554
        break;
5555
    case 0xca: /* lret im */
5556
        val = ldsw_code(s->pc);
5557
        s->pc += 2;
5558
    do_lret:
5559
        if (s->pe && !s->vm86) {
5560
            if (s->cc_op != CC_OP_DYNAMIC)
5561
                gen_op_set_cc_op(s->cc_op);
5562
            gen_jmp_im(pc_start - s->cs_base);
5563
            tcg_gen_helper_0_2(helper_lret_protected,
5564
                               tcg_const_i32(s->dflag), 
5565
                               tcg_const_i32(val));
5566
        } else {
5567
            gen_stack_A0(s);
5568
            /* pop offset */
5569
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5570
            if (s->dflag == 0)
5571
                gen_op_andl_T0_ffff();
5572
            /* NOTE: keeping EIP updated is not a problem in case of
5573
               exception */
5574
            gen_op_jmp_T0();
5575
            /* pop selector */
5576
            gen_op_addl_A0_im(2 << s->dflag);
5577
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5578
            gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5579
            /* add stack offset */
5580
            gen_stack_update(s, val + (4 << s->dflag));
5581
        }
5582
        gen_eob(s);
5583
        break;
5584
    case 0xcb: /* lret */
5585
        val = 0;
5586
        goto do_lret;
5587
    case 0xcf: /* iret */
5588
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5589
            break;
5590
        if (!s->pe) {
5591
            /* real mode */
5592
            tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5593
            s->cc_op = CC_OP_EFLAGS;
5594
        } else if (s->vm86) {
5595
            if (s->iopl != 3) {
5596
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5597
            } else {
5598
                tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5599
                s->cc_op = CC_OP_EFLAGS;
5600
            }
5601
        } else {
5602
            if (s->cc_op != CC_OP_DYNAMIC)
5603
                gen_op_set_cc_op(s->cc_op);
5604
            gen_jmp_im(pc_start - s->cs_base);
5605
            tcg_gen_helper_0_2(helper_iret_protected,
5606
                               tcg_const_i32(s->dflag), 
5607
                               tcg_const_i32(s->pc - s->cs_base));
5608
            s->cc_op = CC_OP_EFLAGS;
5609
        }
5610
        gen_eob(s);
5611
        break;
5612
    case 0xe8: /* call im */
5613
        {
5614
            if (dflag)
5615
                tval = (int32_t)insn_get(s, OT_LONG);
5616
            else
5617
                tval = (int16_t)insn_get(s, OT_WORD);
5618
            next_eip = s->pc - s->cs_base;
5619
            tval += next_eip;
5620
            if (s->dflag == 0)
5621
                tval &= 0xffff;
5622
            gen_movtl_T0_im(next_eip);
5623
            gen_push_T0(s);
5624
            gen_jmp(s, tval);
5625
        }
5626
        break;
5627
    case 0x9a: /* lcall im */
5628
        {
5629
            unsigned int selector, offset;
5630

    
5631
            if (CODE64(s))
5632
                goto illegal_op;
5633
            ot = dflag ? OT_LONG : OT_WORD;
5634
            offset = insn_get(s, ot);
5635
            selector = insn_get(s, OT_WORD);
5636

    
5637
            gen_op_movl_T0_im(selector);
5638
            gen_op_movl_T1_imu(offset);
5639
        }
5640
        goto do_lcall;
5641
    case 0xe9: /* jmp im */
5642
        if (dflag)
5643
            tval = (int32_t)insn_get(s, OT_LONG);
5644
        else
5645
            tval = (int16_t)insn_get(s, OT_WORD);
5646
        tval += s->pc - s->cs_base;
5647
        if (s->dflag == 0)
5648
            tval &= 0xffff;
5649
        gen_jmp(s, tval);
5650
        break;
5651
    case 0xea: /* ljmp im */
5652
        {
5653
            unsigned int selector, offset;
5654

    
5655
            if (CODE64(s))
5656
                goto illegal_op;
5657
            ot = dflag ? OT_LONG : OT_WORD;
5658
            offset = insn_get(s, ot);
5659
            selector = insn_get(s, OT_WORD);
5660

    
5661
            gen_op_movl_T0_im(selector);
5662
            gen_op_movl_T1_imu(offset);
5663
        }
5664
        goto do_ljmp;
5665
    case 0xeb: /* jmp Jb */
5666
        tval = (int8_t)insn_get(s, OT_BYTE);
5667
        tval += s->pc - s->cs_base;
5668
        if (s->dflag == 0)
5669
            tval &= 0xffff;
5670
        gen_jmp(s, tval);
5671
        break;
5672
    case 0x70 ... 0x7f: /* jcc Jb */
5673
        tval = (int8_t)insn_get(s, OT_BYTE);
5674
        goto do_jcc;
5675
    case 0x180 ... 0x18f: /* jcc Jv */
5676
        if (dflag) {
5677
            tval = (int32_t)insn_get(s, OT_LONG);
5678
        } else {
5679
            tval = (int16_t)insn_get(s, OT_WORD);
5680
        }
5681
    do_jcc:
5682
        next_eip = s->pc - s->cs_base;
5683
        tval += next_eip;
5684
        if (s->dflag == 0)
5685
            tval &= 0xffff;
5686
        gen_jcc(s, b, tval, next_eip);
5687
        break;
5688

    
5689
    case 0x190 ... 0x19f: /* setcc Gv */
5690
        modrm = ldub_code(s->pc++);
5691
        gen_setcc(s, b);
5692
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5693
        break;
5694
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
5695
        ot = dflag + OT_WORD;
5696
        modrm = ldub_code(s->pc++);
5697
        reg = ((modrm >> 3) & 7) | rex_r;
5698
        mod = (modrm >> 6) & 3;
5699
        gen_setcc(s, b);
5700
        if (mod != 3) {
5701
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5702
            gen_op_ld_T1_A0(ot + s->mem_index);
5703
        } else {
5704
            rm = (modrm & 7) | REX_B(s);
5705
            gen_op_mov_TN_reg(ot, 1, rm);
5706
        }
5707
        gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5708
        break;
5709

    
5710
        /************************/
5711
        /* flags */
5712
    case 0x9c: /* pushf */
5713
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5714
            break;
5715
        if (s->vm86 && s->iopl != 3) {
5716
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5717
        } else {
5718
            if (s->cc_op != CC_OP_DYNAMIC)
5719
                gen_op_set_cc_op(s->cc_op);
5720
            gen_op_movl_T0_eflags();
5721
            gen_push_T0(s);
5722
        }
5723
        break;
5724
    case 0x9d: /* popf */
5725
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5726
            break;
5727
        if (s->vm86 && s->iopl != 3) {
5728
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5729
        } else {
5730
            gen_pop_T0(s);
5731
            if (s->cpl == 0) {
5732
                if (s->dflag) {
5733
                    gen_op_movl_eflags_T0_cpl0();
5734
                } else {
5735
                    gen_op_movw_eflags_T0_cpl0();
5736
                }
5737
            } else {
5738
                if (s->cpl <= s->iopl) {
5739
                    if (s->dflag) {
5740
                        gen_op_movl_eflags_T0_io();
5741
                    } else {
5742
                        gen_op_movw_eflags_T0_io();
5743
                    }
5744
                } else {
5745
                    if (s->dflag) {
5746
                        gen_op_movl_eflags_T0();
5747
                    } else {
5748
                        gen_op_movw_eflags_T0();
5749
                    }
5750
                }
5751
            }
5752
            gen_pop_update(s);
5753
            s->cc_op = CC_OP_EFLAGS;
5754
            /* abort translation because TF flag may change */
5755
            gen_jmp_im(s->pc - s->cs_base);
5756
            gen_eob(s);
5757
        }
5758
        break;
5759
    case 0x9e: /* sahf */
5760
        if (CODE64(s))
5761
            goto illegal_op;
5762
        gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5763
        if (s->cc_op != CC_OP_DYNAMIC)
5764
            gen_op_set_cc_op(s->cc_op);
5765
        gen_op_movb_eflags_T0();
5766
        s->cc_op = CC_OP_EFLAGS;
5767
        break;
5768
    case 0x9f: /* lahf */
5769
        if (CODE64(s))
5770
            goto illegal_op;
5771
        if (s->cc_op != CC_OP_DYNAMIC)
5772
            gen_op_set_cc_op(s->cc_op);
5773
        gen_op_movl_T0_eflags();
5774
        gen_op_mov_reg_T0(OT_BYTE, R_AH);
5775
        break;
5776
    case 0xf5: /* cmc */
5777
        if (s->cc_op != CC_OP_DYNAMIC)
5778
            gen_op_set_cc_op(s->cc_op);
5779
        gen_op_cmc();
5780
        s->cc_op = CC_OP_EFLAGS;
5781
        break;
5782
    case 0xf8: /* clc */
5783
        if (s->cc_op != CC_OP_DYNAMIC)
5784
            gen_op_set_cc_op(s->cc_op);
5785
        gen_op_clc();
5786
        s->cc_op = CC_OP_EFLAGS;
5787
        break;
5788
    case 0xf9: /* stc */
5789
        if (s->cc_op != CC_OP_DYNAMIC)
5790
            gen_op_set_cc_op(s->cc_op);
5791
        gen_op_stc();
5792
        s->cc_op = CC_OP_EFLAGS;
5793
        break;
5794
    case 0xfc: /* cld */
5795
        tcg_gen_movi_i32(cpu_tmp2_i32, 1);
5796
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5797
        break;
5798
    case 0xfd: /* std */
5799
        tcg_gen_movi_i32(cpu_tmp2_i32, -1);
5800
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5801
        break;
5802

    
5803
        /************************/
5804
        /* bit operations */
5805
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
5806
        ot = dflag + OT_WORD;
5807
        modrm = ldub_code(s->pc++);
5808
        op = (modrm >> 3) & 7;
5809
        mod = (modrm >> 6) & 3;
5810
        rm = (modrm & 7) | REX_B(s);
5811
        if (mod != 3) {
5812
            s->rip_offset = 1;
5813
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5814
            gen_op_ld_T0_A0(ot + s->mem_index);
5815
        } else {
5816
            gen_op_mov_TN_reg(ot, 0, rm);
5817
        }
5818
        /* load shift */
5819
        val = ldub_code(s->pc++);
5820
        gen_op_movl_T1_im(val);
5821
        if (op < 4)
5822
            goto illegal_op;
5823
        op -= 4;
5824
        goto bt_op;
5825
    case 0x1a3: /* bt Gv, Ev */
5826
        op = 0;
5827
        goto do_btx;
5828
    case 0x1ab: /* bts */
5829
        op = 1;
5830
        goto do_btx;
5831
    case 0x1b3: /* btr */
5832
        op = 2;
5833
        goto do_btx;
5834
    case 0x1bb: /* btc */
5835
        op = 3;
5836
    do_btx:
5837
        ot = dflag + OT_WORD;
5838
        modrm = ldub_code(s->pc++);
5839
        reg = ((modrm >> 3) & 7) | rex_r;
5840
        mod = (modrm >> 6) & 3;
5841
        rm = (modrm & 7) | REX_B(s);
5842
        gen_op_mov_TN_reg(OT_LONG, 1, reg);
5843
        if (mod != 3) {
5844
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5845
            /* specific case: we need to add a displacement */
5846
            gen_exts(ot, cpu_T[1]);
5847
            tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
5848
            tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
5849
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
5850
            gen_op_ld_T0_A0(ot + s->mem_index);
5851
        } else {
5852
            gen_op_mov_TN_reg(ot, 0, rm);
5853
        }
5854
    bt_op:
5855
        tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
5856
        switch(op) {
5857
        case 0:
5858
            tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
5859
            tcg_gen_movi_tl(cpu_cc_dst, 0);
5860
            break;
5861
        case 1:
5862
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5863
            tcg_gen_movi_tl(cpu_tmp0, 1);
5864
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5865
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5866
            break;
5867
        case 2:
5868
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5869
            tcg_gen_movi_tl(cpu_tmp0, 1);
5870
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5871
            tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
5872
            tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5873
            break;
5874
        default:
5875
        case 3:
5876
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5877
            tcg_gen_movi_tl(cpu_tmp0, 1);
5878
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5879
            tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5880
            break;
5881
        }
5882
        s->cc_op = CC_OP_SARB + ot;
5883
        if (op != 0) {
5884
            if (mod != 3)
5885
                gen_op_st_T0_A0(ot + s->mem_index);
5886
            else
5887
                gen_op_mov_reg_T0(ot, rm);
5888
            tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
5889
            tcg_gen_movi_tl(cpu_cc_dst, 0);
5890
        }
5891
        break;
5892
    case 0x1bc: /* bsf */
5893
    case 0x1bd: /* bsr */
5894
        {
5895
            int label1;
5896
            ot = dflag + OT_WORD;
5897
            modrm = ldub_code(s->pc++);
5898
            reg = ((modrm >> 3) & 7) | rex_r;
5899
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5900
            gen_extu(ot, cpu_T[0]);
5901
            label1 = gen_new_label();
5902
            tcg_gen_movi_tl(cpu_cc_dst, 0);
5903
            tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), label1);
5904
            if (b & 1) {
5905
                tcg_gen_helper_1_1(helper_bsr, cpu_T[0], cpu_T[0]);
5906
            } else {
5907
                tcg_gen_helper_1_1(helper_bsf, cpu_T[0], cpu_T[0]);
5908
            }
5909
            gen_op_mov_reg_T0(ot, reg);
5910
            tcg_gen_movi_tl(cpu_cc_dst, 1);
5911
            gen_set_label(label1);
5912
            tcg_gen_discard_tl(cpu_cc_src);
5913
            s->cc_op = CC_OP_LOGICB + ot;
5914
        }
5915
        break;
5916
        /************************/
5917
        /* bcd */
5918
    case 0x27: /* daa */
5919
        if (CODE64(s))
5920
            goto illegal_op;
5921
        if (s->cc_op != CC_OP_DYNAMIC)
5922
            gen_op_set_cc_op(s->cc_op);
5923
        tcg_gen_helper_0_0(helper_daa);
5924
        s->cc_op = CC_OP_EFLAGS;
5925
        break;
5926
    case 0x2f: /* das */
5927
        if (CODE64(s))
5928
            goto illegal_op;
5929
        if (s->cc_op != CC_OP_DYNAMIC)
5930
            gen_op_set_cc_op(s->cc_op);
5931
        tcg_gen_helper_0_0(helper_das);
5932
        s->cc_op = CC_OP_EFLAGS;
5933
        break;
5934
    case 0x37: /* aaa */
5935
        if (CODE64(s))
5936
            goto illegal_op;
5937
        if (s->cc_op != CC_OP_DYNAMIC)
5938
            gen_op_set_cc_op(s->cc_op);
5939
        tcg_gen_helper_0_0(helper_aaa);
5940
        s->cc_op = CC_OP_EFLAGS;
5941
        break;
5942
    case 0x3f: /* aas */
5943
        if (CODE64(s))
5944
            goto illegal_op;
5945
        if (s->cc_op != CC_OP_DYNAMIC)
5946
            gen_op_set_cc_op(s->cc_op);
5947
        tcg_gen_helper_0_0(helper_aas);
5948
        s->cc_op = CC_OP_EFLAGS;
5949
        break;
5950
    case 0xd4: /* aam */
5951
        if (CODE64(s))
5952
            goto illegal_op;
5953
        val = ldub_code(s->pc++);
5954
        if (val == 0) {
5955
            gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5956
        } else {
5957
            tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
5958
            s->cc_op = CC_OP_LOGICB;
5959
        }
5960
        break;
5961
    case 0xd5: /* aad */
5962
        if (CODE64(s))
5963
            goto illegal_op;
5964
        val = ldub_code(s->pc++);
5965
        tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
5966
        s->cc_op = CC_OP_LOGICB;
5967
        break;
5968
        /************************/
5969
        /* misc */
5970
    case 0x90: /* nop */
5971
        /* XXX: xchg + rex handling */
5972
        /* XXX: correct lock test for all insn */
5973
        if (prefixes & PREFIX_LOCK)
5974
            goto illegal_op;
5975
        if (prefixes & PREFIX_REPZ) {
5976
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
5977
        }
5978
        break;
5979
    case 0x9b: /* fwait */
5980
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5981
            (HF_MP_MASK | HF_TS_MASK)) {
5982
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5983
        } else {
5984
            if (s->cc_op != CC_OP_DYNAMIC)
5985
                gen_op_set_cc_op(s->cc_op);
5986
            gen_jmp_im(pc_start - s->cs_base);
5987
            tcg_gen_helper_0_0(helper_fwait);
5988
        }
5989
        break;
5990
    case 0xcc: /* int3 */
5991
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5992
            break;
5993
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5994
        break;
5995
    case 0xcd: /* int N */
5996
        val = ldub_code(s->pc++);
5997
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5998
            break;
5999
        if (s->vm86 && s->iopl != 3) {
6000
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6001
        } else {
6002
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
6003
        }
6004
        break;
6005
    case 0xce: /* into */
6006
        if (CODE64(s))
6007
            goto illegal_op;
6008
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
6009
            break;
6010
        if (s->cc_op != CC_OP_DYNAMIC)
6011
            gen_op_set_cc_op(s->cc_op);
6012
        gen_jmp_im(pc_start - s->cs_base);
6013
        tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
6014
        break;
6015
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
6016
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
6017
            break;
6018
#if 1
6019
        gen_debug(s, pc_start - s->cs_base);
6020
#else
6021
        /* start debug */
6022
        tb_flush(cpu_single_env);
6023
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
6024
#endif
6025
        break;
6026
    case 0xfa: /* cli */
6027
        if (!s->vm86) {
6028
            if (s->cpl <= s->iopl) {
6029
                tcg_gen_helper_0_0(helper_cli);
6030
            } else {
6031
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6032
            }
6033
        } else {
6034
            if (s->iopl == 3) {
6035
                tcg_gen_helper_0_0(helper_cli);
6036
            } else {
6037
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6038
            }
6039
        }
6040
        break;
6041
    case 0xfb: /* sti */
6042
        if (!s->vm86) {
6043
            if (s->cpl <= s->iopl) {
6044
            gen_sti:
6045
                tcg_gen_helper_0_0(helper_sti);
6046
                /* interruptions are enabled only the first insn after sti */
6047
                /* If several instructions disable interrupts, only the
6048
                   _first_ does it */
6049
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
6050
                    tcg_gen_helper_0_0(helper_set_inhibit_irq);
6051
                /* give a chance to handle pending irqs */
6052
                gen_jmp_im(s->pc - s->cs_base);
6053
                gen_eob(s);
6054
            } else {
6055
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6056
            }
6057
        } else {
6058
            if (s->iopl == 3) {
6059
                goto gen_sti;
6060
            } else {
6061
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6062
            }
6063
        }
6064
        break;
6065
    case 0x62: /* bound */
6066
        if (CODE64(s))
6067
            goto illegal_op;
6068
        ot = dflag ? OT_LONG : OT_WORD;
6069
        modrm = ldub_code(s->pc++);
6070
        reg = (modrm >> 3) & 7;
6071
        mod = (modrm >> 6) & 3;
6072
        if (mod == 3)
6073
            goto illegal_op;
6074
        gen_op_mov_TN_reg(ot, 0, reg);
6075
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6076
        gen_jmp_im(pc_start - s->cs_base);
6077
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6078
        if (ot == OT_WORD)
6079
            tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
6080
        else
6081
            tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
6082
        break;
6083
    case 0x1c8 ... 0x1cf: /* bswap reg */
6084
        reg = (b & 7) | REX_B(s);
6085
#ifdef TARGET_X86_64
6086
        if (dflag == 2) {
6087
            gen_op_mov_TN_reg(OT_QUAD, 0, reg);
6088
            tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
6089
            gen_op_mov_reg_T0(OT_QUAD, reg);
6090
        } else
6091
        {
6092
            TCGv tmp0;
6093
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
6094
            
6095
            tmp0 = tcg_temp_new(TCG_TYPE_I32);
6096
            tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
6097
            tcg_gen_bswap_i32(tmp0, tmp0);
6098
            tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
6099
            gen_op_mov_reg_T0(OT_LONG, reg);
6100
        }
6101
#else
6102
        {
6103
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
6104
            tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
6105
            gen_op_mov_reg_T0(OT_LONG, reg);
6106
        }
6107
#endif
6108
        break;
6109
    case 0xd6: /* salc */
6110
        if (CODE64(s))
6111
            goto illegal_op;
6112
        if (s->cc_op != CC_OP_DYNAMIC)
6113
            gen_op_set_cc_op(s->cc_op);
6114
        gen_op_salc();
6115
        break;
6116
    case 0xe0: /* loopnz */
6117
    case 0xe1: /* loopz */
6118
    case 0xe2: /* loop */
6119
    case 0xe3: /* jecxz */
6120
        {
6121
            int l1, l2, l3;
6122

    
6123
            tval = (int8_t)insn_get(s, OT_BYTE);
6124
            next_eip = s->pc - s->cs_base;
6125
            tval += next_eip;
6126
            if (s->dflag == 0)
6127
                tval &= 0xffff;
6128

    
6129
            l1 = gen_new_label();
6130
            l2 = gen_new_label();
6131
            l3 = gen_new_label();
6132
            b &= 3;
6133
            switch(b) {
6134
            case 0: /* loopnz */
6135
            case 1: /* loopz */
6136
                if (s->cc_op != CC_OP_DYNAMIC)
6137
                    gen_op_set_cc_op(s->cc_op);
6138
                gen_op_add_reg_im(s->aflag, R_ECX, -1);
6139
                gen_op_jz_ecx(s->aflag, l3);
6140
                gen_compute_eflags(cpu_tmp0);
6141
                tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
6142
                if (b == 0) {
6143
                    tcg_gen_brcond_tl(TCG_COND_EQ, 
6144
                                      cpu_tmp0, tcg_const_tl(0), l1);
6145
                } else {
6146
                    tcg_gen_brcond_tl(TCG_COND_NE, 
6147
                                      cpu_tmp0, tcg_const_tl(0), l1);
6148
                }
6149
                break;
6150
            case 2: /* loop */
6151
                gen_op_add_reg_im(s->aflag, R_ECX, -1);
6152
                gen_op_jnz_ecx(s->aflag, l1);
6153
                break;
6154
            default:
6155
            case 3: /* jcxz */
6156
                gen_op_jz_ecx(s->aflag, l1);
6157
                break;
6158
            }
6159

    
6160
            gen_set_label(l3);
6161
            gen_jmp_im(next_eip);
6162
            gen_op_jmp_label(l2);
6163

    
6164
            gen_set_label(l1);
6165
            gen_jmp_im(tval);
6166
            gen_set_label(l2);
6167
            gen_eob(s);
6168
        }
6169
        break;
6170
    case 0x130: /* wrmsr */
6171
    case 0x132: /* rdmsr */
6172
        if (s->cpl != 0) {
6173
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6174
        } else {
6175
            int retval = 0;
6176
            if (b & 2) {
6177
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
6178
                tcg_gen_helper_0_0(helper_rdmsr);
6179
            } else {
6180
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
6181
                tcg_gen_helper_0_0(helper_wrmsr);
6182
            }
6183
            if(retval)
6184
                gen_eob(s);
6185
        }
6186
        break;
6187
    case 0x131: /* rdtsc */
6188
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
6189
            break;
6190
        gen_jmp_im(pc_start - s->cs_base);
6191
        tcg_gen_helper_0_0(helper_rdtsc);
6192
        break;
6193
    case 0x133: /* rdpmc */
6194
        gen_jmp_im(pc_start - s->cs_base);
6195
        tcg_gen_helper_0_0(helper_rdpmc);
6196
        break;
6197
    case 0x134: /* sysenter */
6198
        if (CODE64(s))
6199
            goto illegal_op;
6200
        if (!s->pe) {
6201
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6202
        } else {
6203
            if (s->cc_op != CC_OP_DYNAMIC) {
6204
                gen_op_set_cc_op(s->cc_op);
6205
                s->cc_op = CC_OP_DYNAMIC;
6206
            }
6207
            gen_jmp_im(pc_start - s->cs_base);
6208
            tcg_gen_helper_0_0(helper_sysenter);
6209
            gen_eob(s);
6210
        }
6211
        break;
6212
    case 0x135: /* sysexit */
6213
        if (CODE64(s))
6214
            goto illegal_op;
6215
        if (!s->pe) {
6216
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6217
        } else {
6218
            if (s->cc_op != CC_OP_DYNAMIC) {
6219
                gen_op_set_cc_op(s->cc_op);
6220
                s->cc_op = CC_OP_DYNAMIC;
6221
            }
6222
            gen_jmp_im(pc_start - s->cs_base);
6223
            tcg_gen_helper_0_0(helper_sysexit);
6224
            gen_eob(s);
6225
        }
6226
        break;
6227
#ifdef TARGET_X86_64
6228
    case 0x105: /* syscall */
6229
        /* XXX: is it usable in real mode ? */
6230
        if (s->cc_op != CC_OP_DYNAMIC) {
6231
            gen_op_set_cc_op(s->cc_op);
6232
            s->cc_op = CC_OP_DYNAMIC;
6233
        }
6234
        gen_jmp_im(pc_start - s->cs_base);
6235
        tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
6236
        gen_eob(s);
6237
        break;
6238
    case 0x107: /* sysret */
6239
        if (!s->pe) {
6240
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6241
        } else {
6242
            if (s->cc_op != CC_OP_DYNAMIC) {
6243
                gen_op_set_cc_op(s->cc_op);
6244
                s->cc_op = CC_OP_DYNAMIC;
6245
            }
6246
            gen_jmp_im(pc_start - s->cs_base);
6247
            tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
6248
            /* condition codes are modified only in long mode */
6249
            if (s->lma)
6250
                s->cc_op = CC_OP_EFLAGS;
6251
            gen_eob(s);
6252
        }
6253
        break;
6254
#endif
6255
    case 0x1a2: /* cpuid */
6256
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
6257
            break;
6258
        tcg_gen_helper_0_0(helper_cpuid);
6259
        break;
6260
    case 0xf4: /* hlt */
6261
        if (s->cpl != 0) {
6262
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6263
        } else {
6264
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
6265
                break;
6266
            if (s->cc_op != CC_OP_DYNAMIC)
6267
                gen_op_set_cc_op(s->cc_op);
6268
            gen_jmp_im(s->pc - s->cs_base);
6269
            tcg_gen_helper_0_0(helper_hlt);
6270
            s->is_jmp = 3;
6271
        }
6272
        break;
6273
    case 0x100:
6274
        modrm = ldub_code(s->pc++);
6275
        mod = (modrm >> 6) & 3;
6276
        op = (modrm >> 3) & 7;
6277
        switch(op) {
6278
        case 0: /* sldt */
6279
            if (!s->pe || s->vm86)
6280
                goto illegal_op;
6281
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
6282
                break;
6283
            gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
6284
            ot = OT_WORD;
6285
            if (mod == 3)
6286
                ot += s->dflag;
6287
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6288
            break;
6289
        case 2: /* lldt */
6290
            if (!s->pe || s->vm86)
6291
                goto illegal_op;
6292
            if (s->cpl != 0) {
6293
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6294
            } else {
6295
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
6296
                    break;
6297
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6298
                gen_jmp_im(pc_start - s->cs_base);
6299
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6300
                tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
6301
            }
6302
            break;
6303
        case 1: /* str */
6304
            if (!s->pe || s->vm86)
6305
                goto illegal_op;
6306
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
6307
                break;
6308
            gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
6309
            ot = OT_WORD;
6310
            if (mod == 3)
6311
                ot += s->dflag;
6312
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6313
            break;
6314
        case 3: /* ltr */
6315
            if (!s->pe || s->vm86)
6316
                goto illegal_op;
6317
            if (s->cpl != 0) {
6318
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6319
            } else {
6320
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
6321
                    break;
6322
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6323
                gen_jmp_im(pc_start - s->cs_base);
6324
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6325
                tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
6326
            }
6327
            break;
6328
        case 4: /* verr */
6329
        case 5: /* verw */
6330
            if (!s->pe || s->vm86)
6331
                goto illegal_op;
6332
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6333
            if (s->cc_op != CC_OP_DYNAMIC)
6334
                gen_op_set_cc_op(s->cc_op);
6335
            if (op == 4)
6336
                gen_op_verr();
6337
            else
6338
                gen_op_verw();
6339
            s->cc_op = CC_OP_EFLAGS;
6340
            break;
6341
        default:
6342
            goto illegal_op;
6343
        }
6344
        break;
6345
    case 0x101:
6346
        modrm = ldub_code(s->pc++);
6347
        mod = (modrm >> 6) & 3;
6348
        op = (modrm >> 3) & 7;
6349
        rm = modrm & 7;
6350
        switch(op) {
6351
        case 0: /* sgdt */
6352
            if (mod == 3)
6353
                goto illegal_op;
6354
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
6355
                break;
6356
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6357
            gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
6358
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
6359
            gen_add_A0_im(s, 2);
6360
            gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
6361
            if (!s->dflag)
6362
                gen_op_andl_T0_im(0xffffff);
6363
            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6364
            break;
6365
        case 1:
6366
            if (mod == 3) {
6367
                switch (rm) {
6368
                case 0: /* monitor */
6369
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6370
                        s->cpl != 0)
6371
                        goto illegal_op;
6372
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
6373
                        break;
6374
                    gen_jmp_im(pc_start - s->cs_base);
6375
#ifdef TARGET_X86_64
6376
                    if (s->aflag == 2) {
6377
                        gen_op_movq_A0_reg(R_EAX);
6378
                    } else
6379
#endif
6380
                    {
6381
                        gen_op_movl_A0_reg(R_EAX);
6382
                        if (s->aflag == 0)
6383
                            gen_op_andl_A0_ffff();
6384
                    }
6385
                    gen_add_A0_ds_seg(s);
6386
                    tcg_gen_helper_0_1(helper_monitor, cpu_A0);
6387
                    break;
6388
                case 1: /* mwait */
6389
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6390
                        s->cpl != 0)
6391
                        goto illegal_op;
6392
                    if (s->cc_op != CC_OP_DYNAMIC) {
6393
                        gen_op_set_cc_op(s->cc_op);
6394
                        s->cc_op = CC_OP_DYNAMIC;
6395
                    }
6396
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
6397
                        break;
6398
                    gen_jmp_im(s->pc - s->cs_base);
6399
                    tcg_gen_helper_0_0(helper_mwait);
6400
                    gen_eob(s);
6401
                    break;
6402
                default:
6403
                    goto illegal_op;
6404
                }
6405
            } else { /* sidt */
6406
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
6407
                    break;
6408
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6409
                gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
6410
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
6411
                gen_add_A0_im(s, 2);
6412
                gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
6413
                if (!s->dflag)
6414
                    gen_op_andl_T0_im(0xffffff);
6415
                gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6416
            }
6417
            break;
6418
        case 2: /* lgdt */
6419
        case 3: /* lidt */
6420
            if (mod == 3) {
6421
                switch(rm) {
6422
                case 0: /* VMRUN */
6423
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
6424
                        break;
6425
                    if (s->cc_op != CC_OP_DYNAMIC)
6426
                        gen_op_set_cc_op(s->cc_op);
6427
                    gen_jmp_im(s->pc - s->cs_base);
6428
                    tcg_gen_helper_0_0(helper_vmrun);
6429
                    s->cc_op = CC_OP_EFLAGS;
6430
                    gen_eob(s);
6431
                    break;
6432
                case 1: /* VMMCALL */
6433
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
6434
                         break;
6435
                    /* FIXME: cause #UD if hflags & SVM */
6436
                    tcg_gen_helper_0_0(helper_vmmcall);
6437
                    break;
6438
                case 2: /* VMLOAD */
6439
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
6440
                         break;
6441
                    tcg_gen_helper_0_0(helper_vmload);
6442
                    break;
6443
                case 3: /* VMSAVE */
6444
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
6445
                         break;
6446
                    tcg_gen_helper_0_0(helper_vmsave);
6447
                    break;
6448
                case 4: /* STGI */
6449
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
6450
                         break;
6451
                    tcg_gen_helper_0_0(helper_stgi);
6452
                    break;
6453
                case 5: /* CLGI */
6454
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
6455
                         break;
6456
                    tcg_gen_helper_0_0(helper_clgi);
6457
                    break;
6458
                case 6: /* SKINIT */
6459
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
6460
                         break;
6461
                    tcg_gen_helper_0_0(helper_skinit);
6462
                    break;
6463
                case 7: /* INVLPGA */
6464
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
6465
                         break;
6466
                    tcg_gen_helper_0_0(helper_invlpga);
6467
                    break;
6468
                default:
6469
                    goto illegal_op;
6470
                }
6471
            } else if (s->cpl != 0) {
6472
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6473
            } else {
6474
                if (gen_svm_check_intercept(s, pc_start,
6475
                                            op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
6476
                    break;
6477
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6478
                gen_op_ld_T1_A0(OT_WORD + s->mem_index);
6479
                gen_add_A0_im(s, 2);
6480
                gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6481
                if (!s->dflag)
6482
                    gen_op_andl_T0_im(0xffffff);
6483
                if (op == 2) {
6484
                    gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
6485
                    gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
6486
                } else {
6487
                    gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
6488
                    gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
6489
                }
6490
            }
6491
            break;
6492
        case 4: /* smsw */
6493
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
6494
                break;
6495
            gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
6496
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6497
            break;
6498
        case 6: /* lmsw */
6499
            if (s->cpl != 0) {
6500
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6501
            } else {
6502
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
6503
                    break;
6504
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6505
                tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
6506
                gen_jmp_im(s->pc - s->cs_base);
6507
                gen_eob(s);
6508
            }
6509
            break;
6510
        case 7: /* invlpg */
6511
            if (s->cpl != 0) {
6512
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6513
            } else {
6514
                if (mod == 3) {
6515
#ifdef TARGET_X86_64
6516
                    if (CODE64(s) && rm == 0) {
6517
                        /* swapgs */
6518
                        gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
6519
                        gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
6520
                        gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
6521
                        gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
6522
                    } else
6523
#endif
6524
                    {
6525
                        goto illegal_op;
6526
                    }
6527
                } else {
6528
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
6529
                        break;
6530
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6531
                    tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
6532
                    gen_jmp_im(s->pc - s->cs_base);
6533
                    gen_eob(s);
6534
                }
6535
            }
6536
            break;
6537
        default:
6538
            goto illegal_op;
6539
        }
6540
        break;
6541
    case 0x108: /* invd */
6542
    case 0x109: /* wbinvd */
6543
        if (s->cpl != 0) {
6544
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6545
        } else {
6546
            if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6547
                break;
6548
            /* nothing to do */
6549
        }
6550
        break;
6551
    case 0x63: /* arpl or movslS (x86_64) */
6552
#ifdef TARGET_X86_64
6553
        if (CODE64(s)) {
6554
            int d_ot;
6555
            /* d_ot is the size of destination */
6556
            d_ot = dflag + OT_WORD;
6557

    
6558
            modrm = ldub_code(s->pc++);
6559
            reg = ((modrm >> 3) & 7) | rex_r;
6560
            mod = (modrm >> 6) & 3;
6561
            rm = (modrm & 7) | REX_B(s);
6562

    
6563
            if (mod == 3) {
6564
                gen_op_mov_TN_reg(OT_LONG, 0, rm);
6565
                /* sign extend */
6566
                if (d_ot == OT_QUAD)
6567
                    tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
6568
                gen_op_mov_reg_T0(d_ot, reg);
6569
            } else {
6570
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6571
                if (d_ot == OT_QUAD) {
6572
                    gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6573
                } else {
6574
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6575
                }
6576
                gen_op_mov_reg_T0(d_ot, reg);
6577
            }
6578
        } else
6579
#endif
6580
        {
6581
            if (!s->pe || s->vm86)
6582
                goto illegal_op;
6583
            ot = dflag ? OT_LONG : OT_WORD;
6584
            modrm = ldub_code(s->pc++);
6585
            reg = (modrm >> 3) & 7;
6586
            mod = (modrm >> 6) & 3;
6587
            rm = modrm & 7;
6588
            if (mod != 3) {
6589
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6590
                gen_op_ld_T0_A0(ot + s->mem_index);
6591
            } else {
6592
                gen_op_mov_TN_reg(ot, 0, rm);
6593
            }
6594
            gen_op_mov_TN_reg(ot, 1, reg);
6595
            if (s->cc_op != CC_OP_DYNAMIC)
6596
                gen_op_set_cc_op(s->cc_op);
6597
            gen_op_arpl();
6598
            s->cc_op = CC_OP_EFLAGS;
6599
            if (mod != 3) {
6600
                gen_op_st_T0_A0(ot + s->mem_index);
6601
            } else {
6602
                gen_op_mov_reg_T0(ot, rm);
6603
            }
6604
            gen_op_arpl_update();
6605
        }
6606
        break;
6607
    case 0x102: /* lar */
6608
    case 0x103: /* lsl */
6609
        if (!s->pe || s->vm86)
6610
            goto illegal_op;
6611
        ot = dflag ? OT_LONG : OT_WORD;
6612
        modrm = ldub_code(s->pc++);
6613
        reg = ((modrm >> 3) & 7) | rex_r;
6614
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6615
        gen_op_mov_TN_reg(ot, 1, reg);
6616
        if (s->cc_op != CC_OP_DYNAMIC)
6617
            gen_op_set_cc_op(s->cc_op);
6618
        if (b == 0x102)
6619
            gen_op_lar();
6620
        else
6621
            gen_op_lsl();
6622
        s->cc_op = CC_OP_EFLAGS;
6623
        gen_op_mov_reg_T1(ot, reg);
6624
        break;
6625
    case 0x118:
6626
        modrm = ldub_code(s->pc++);
6627
        mod = (modrm >> 6) & 3;
6628
        op = (modrm >> 3) & 7;
6629
        switch(op) {
6630
        case 0: /* prefetchnta */
6631
        case 1: /* prefetchnt0 */
6632
        case 2: /* prefetchnt0 */
6633
        case 3: /* prefetchnt0 */
6634
            if (mod == 3)
6635
                goto illegal_op;
6636
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6637
            /* nothing more to do */
6638
            break;
6639
        default: /* nop (multi byte) */
6640
            gen_nop_modrm(s, modrm);
6641
            break;
6642
        }
6643
        break;
6644
    case 0x119 ... 0x11f: /* nop (multi byte) */
6645
        modrm = ldub_code(s->pc++);
6646
        gen_nop_modrm(s, modrm);
6647
        break;
6648
    case 0x120: /* mov reg, crN */
6649
    case 0x122: /* mov crN, reg */
6650
        if (s->cpl != 0) {
6651
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6652
        } else {
6653
            modrm = ldub_code(s->pc++);
6654
            if ((modrm & 0xc0) != 0xc0)
6655
                goto illegal_op;
6656
            rm = (modrm & 7) | REX_B(s);
6657
            reg = ((modrm >> 3) & 7) | rex_r;
6658
            if (CODE64(s))
6659
                ot = OT_QUAD;
6660
            else
6661
                ot = OT_LONG;
6662
            switch(reg) {
6663
            case 0:
6664
            case 2:
6665
            case 3:
6666
            case 4:
6667
            case 8:
6668
                if (b & 2) {
6669
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6670
                    gen_op_mov_TN_reg(ot, 0, rm);
6671
                    tcg_gen_helper_0_2(helper_movl_crN_T0, 
6672
                                       tcg_const_i32(reg), cpu_T[0]);
6673
                    gen_jmp_im(s->pc - s->cs_base);
6674
                    gen_eob(s);
6675
                } else {
6676
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6677
#if !defined(CONFIG_USER_ONLY)
6678
                    if (reg == 8)
6679
                        tcg_gen_helper_1_0(helper_movtl_T0_cr8, cpu_T[0]);
6680
                    else
6681
#endif
6682
                        gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6683
                    gen_op_mov_reg_T0(ot, rm);
6684
                }
6685
                break;
6686
            default:
6687
                goto illegal_op;
6688
            }
6689
        }
6690
        break;
6691
    case 0x121: /* mov reg, drN */
6692
    case 0x123: /* mov drN, reg */
6693
        if (s->cpl != 0) {
6694
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6695
        } else {
6696
            modrm = ldub_code(s->pc++);
6697
            if ((modrm & 0xc0) != 0xc0)
6698
                goto illegal_op;
6699
            rm = (modrm & 7) | REX_B(s);
6700
            reg = ((modrm >> 3) & 7) | rex_r;
6701
            if (CODE64(s))
6702
                ot = OT_QUAD;
6703
            else
6704
                ot = OT_LONG;
6705
            /* XXX: do it dynamically with CR4.DE bit */
6706
            if (reg == 4 || reg == 5 || reg >= 8)
6707
                goto illegal_op;
6708
            if (b & 2) {
6709
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6710
                gen_op_mov_TN_reg(ot, 0, rm);
6711
                tcg_gen_helper_0_2(helper_movl_drN_T0,
6712
                                   tcg_const_i32(reg), cpu_T[0]);
6713
                gen_jmp_im(s->pc - s->cs_base);
6714
                gen_eob(s);
6715
            } else {
6716
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6717
                gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6718
                gen_op_mov_reg_T0(ot, rm);
6719
            }
6720
        }
6721
        break;
6722
    case 0x106: /* clts */
6723
        if (s->cpl != 0) {
6724
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6725
        } else {
6726
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6727
            tcg_gen_helper_0_0(helper_clts);
6728
            /* abort block because static cpu state changed */
6729
            gen_jmp_im(s->pc - s->cs_base);
6730
            gen_eob(s);
6731
        }
6732
        break;
6733
    /* MMX/3DNow!/SSE/SSE2/SSE3 support */
6734
    case 0x1c3: /* MOVNTI reg, mem */
6735
        if (!(s->cpuid_features & CPUID_SSE2))
6736
            goto illegal_op;
6737
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6738
        modrm = ldub_code(s->pc++);
6739
        mod = (modrm >> 6) & 3;
6740
        if (mod == 3)
6741
            goto illegal_op;
6742
        reg = ((modrm >> 3) & 7) | rex_r;
6743
        /* generate a generic store */
6744
        gen_ldst_modrm(s, modrm, ot, reg, 1);
6745
        break;
6746
    case 0x1ae:
6747
        modrm = ldub_code(s->pc++);
6748
        mod = (modrm >> 6) & 3;
6749
        op = (modrm >> 3) & 7;
6750
        switch(op) {
6751
        case 0: /* fxsave */
6752
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6753
                (s->flags & HF_EM_MASK))
6754
                goto illegal_op;
6755
            if (s->flags & HF_TS_MASK) {
6756
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6757
                break;
6758
            }
6759
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6760
            if (s->cc_op != CC_OP_DYNAMIC)
6761
                gen_op_set_cc_op(s->cc_op);
6762
            gen_jmp_im(pc_start - s->cs_base);
6763
            tcg_gen_helper_0_2(helper_fxsave, 
6764
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
6765
            break;
6766
        case 1: /* fxrstor */
6767
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6768
                (s->flags & HF_EM_MASK))
6769
                goto illegal_op;
6770
            if (s->flags & HF_TS_MASK) {
6771
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6772
                break;
6773
            }
6774
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6775
            if (s->cc_op != CC_OP_DYNAMIC)
6776
                gen_op_set_cc_op(s->cc_op);
6777
            gen_jmp_im(pc_start - s->cs_base);
6778
            tcg_gen_helper_0_2(helper_fxrstor,
6779
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
6780
            break;
6781
        case 2: /* ldmxcsr */
6782
        case 3: /* stmxcsr */
6783
            if (s->flags & HF_TS_MASK) {
6784
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6785
                break;
6786
            }
6787
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6788
                mod == 3)
6789
                goto illegal_op;
6790
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6791
            if (op == 2) {
6792
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6793
                gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6794
            } else {
6795
                gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6796
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
6797
            }
6798
            break;
6799
        case 5: /* lfence */
6800
        case 6: /* mfence */
6801
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6802
                goto illegal_op;
6803
            break;
6804
        case 7: /* sfence / clflush */
6805
            if ((modrm & 0xc7) == 0xc0) {
6806
                /* sfence */
6807
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
6808
                if (!(s->cpuid_features & CPUID_SSE))
6809
                    goto illegal_op;
6810
            } else {
6811
                /* clflush */
6812
                if (!(s->cpuid_features & CPUID_CLFLUSH))
6813
                    goto illegal_op;
6814
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6815
            }
6816
            break;
6817
        default:
6818
            goto illegal_op;
6819
        }
6820
        break;
6821
    case 0x10d: /* 3DNow! prefetch(w) */
6822
        modrm = ldub_code(s->pc++);
6823
        mod = (modrm >> 6) & 3;
6824
        if (mod == 3)
6825
            goto illegal_op;
6826
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6827
        /* ignore for now */
6828
        break;
6829
    case 0x1aa: /* rsm */
6830
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
6831
            break;
6832
        if (!(s->flags & HF_SMM_MASK))
6833
            goto illegal_op;
6834
        if (s->cc_op != CC_OP_DYNAMIC) {
6835
            gen_op_set_cc_op(s->cc_op);
6836
            s->cc_op = CC_OP_DYNAMIC;
6837
        }
6838
        gen_jmp_im(s->pc - s->cs_base);
6839
        tcg_gen_helper_0_0(helper_rsm);
6840
        gen_eob(s);
6841
        break;
6842
    case 0x10e ... 0x10f:
6843
        /* 3DNow! instructions, ignore prefixes */
6844
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
6845
    case 0x110 ... 0x117:
6846
    case 0x128 ... 0x12f:
6847
    case 0x150 ... 0x177:
6848
    case 0x17c ... 0x17f:
6849
    case 0x1c2:
6850
    case 0x1c4 ... 0x1c6:
6851
    case 0x1d0 ... 0x1fe:
6852
        gen_sse(s, b, pc_start, rex_r);
6853
        break;
6854
    default:
6855
        goto illegal_op;
6856
    }
6857
    /* lock generation */
6858
    if (s->prefix & PREFIX_LOCK)
6859
        tcg_gen_helper_0_0(helper_unlock);
6860
    return s->pc;
6861
 illegal_op:
6862
    if (s->prefix & PREFIX_LOCK)
6863
        tcg_gen_helper_0_0(helper_unlock);
6864
    /* XXX: ensure that no lock was generated */
6865
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6866
    return s->pc;
6867
}
6868

    
6869
/* Expansion hook for TCG macro ops.  Only the MACRO_TEST id is
   handled, and only when the MACRO_TEST build switch is defined;
   otherwise this is a no-op. */
static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
{
#ifdef MACRO_TEST
    if (macro_id == MACRO_TEST) {
        tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
    }
#endif
}
6879

    
6880
/* One-time initialization of the TCG globals used by the i386
   translator: registers the macro expansion hook and maps the
   frontend's pseudo-registers (env, T0, T1, A0, T3, cc_*) onto fixed
   host registers or CPUState memory slots depending on the host. */
void optimize_flags_init(void)
{
    /* Sanity-check the CCTable element size assumed elsewhere by the
       flag optimizer: 8 bytes on 32-bit hosts, 16 on 64-bit hosts
       (presumably so tables can be indexed by shift — TODO confirm). */
#if TCG_TARGET_REG_BITS == 32
    assert(sizeof(CCTable) == (1 << 3));
#else
    assert(sizeof(CCTable) == (1 << 4));
#endif
    tcg_set_macro_func(&tcg_ctx, tcg_macro_func);

    /* The CPU environment pointer lives in the fixed host register
       TCG_AREG0 for the whole translation. */
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
#if TARGET_LONG_BITS > HOST_LONG_BITS
    /* A guest word does not fit in one host register: keep T0/T1/A0
       in CPUState memory slots instead of fixed host registers. */
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL, 
                                  TCG_AREG0, offsetof(CPUState, t0), "T0");
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
                                  TCG_AREG0, offsetof(CPUState, t1), "T1");
    cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
                                TCG_AREG0, offsetof(CPUState, t2), "A0");
#else
    /* Guest word fits in a host register: pin T0/T1/A0 to fixed host
       registers AREG1..AREG3. */
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
    cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
#endif
    /* T3 is always memory-backed. */
    cpu_T3 = tcg_global_mem_new(TCG_TYPE_TL,
                                TCG_AREG0, offsetof(CPUState, t3), "T3");
#if defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS)
    /* XXX: must be suppressed once there are less fixed registers */
    cpu_tmp1_i64 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");
#endif
    /* Lazy condition-code state (operation id, source, destination)
       mirrored from CPUState. */
    cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
                                   TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
    cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
    cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
}
6915

    
6916
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
6919
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int j, lj, cflags;
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* Unpack the CPU flags word captured in the TB into the
       per-translation disassembly context. */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    /* Condition codes are unknown at the start of a TB. */
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;  /* CPL 3: presumably the user-mode MMU index — TODO confirm */
        else
            dc->mem_index = 1 * 4;  /* CPL 0-2: kernel-mode index */
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    /* Direct TB chaining is only allowed when nothing (trap flag,
       single-step, pending IRQ inhibit) forces the TB to end early. */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    /* Allocate the TCG temporaries the code generator uses. */
    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
#if !(defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS))
    /* Otherwise cpu_tmp1_i64 is a fixed-register global created in
       optimize_flags_init(). */
    cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
#endif
    cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
    cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
    cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;  /* index of the last gen_opc_* slot filled (search_pc mode only) */

    /* Main translation loop: one guest instruction per iteration. */
    for(;;) {
        /* Emit a debug exception if a breakpoint is set on this insn. */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* Record guest PC and cc_op for each generated op so the
               exact guest state can be reconstructed later (see
               gen_pc_load); intermediate slots are zero-filled. */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;  /* 64-bit disassembly */
        else
#endif
            disas_flags = !dc->code32;  /* 0: 32-bit, 1: 16-bit */
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP_OPT) {
            fprintf(logfile, "OP before opt:\n");
            tcg_dump_ops(&tcg_ctx, logfile);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* In search_pc mode the TB has already been translated once, so
       its size is left untouched. */
    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
7078

    
7079
/* Translate basic block 'tb' without recording per-op PC information. */
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    const int want_pc_info = 0;

    return gen_intermediate_code_internal(env, tb, want_pc_info);
}
7083

    
7084
/* Re-translate basic block 'tb', additionally recording per-op PC
   information for later guest-state reconstruction. */
int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    const int want_pc_info = 1;

    return gen_intermediate_code_internal(env, tb, want_pc_info);
}
7088

    
7089
/* Restore the guest state (EIP and, when statically known, cc_op)
   corresponding to op index 'pc_pos', using the tables filled in by
   gen_intermediate_code_pc(). */
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    int restored_cc_op;

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP) {
        int idx;

        fprintf(logfile, "RESTORE:\n");
        for (idx = 0; idx <= pc_pos; idx++) {
            if (!gen_opc_instr_start[idx])
                continue;
            fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", idx, gen_opc_pc[idx]);
        }
        fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                (uint32_t)tb->cs_base);
    }
#endif
    /* EIP is the recorded linear PC minus the code segment base. */
    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
    /* Only overwrite cc_op when it was statically known at that op;
       CC_OP_DYNAMIC means the runtime value is already correct. */
    restored_cc_op = gen_opc_cc_op[pc_pos];
    if (restored_cc_op != CC_OP_DYNAMIC)
        env->cc_op = restored_cc_op;
}