Statistics
| Branch: | Revision:

root / target-i386 / translate.c @ 07be379f

History | View | Annotate | Download (227.9 kB)

1
/*
2
 *  i386 translation
3
 *
4
 *  Copyright (c) 2003 Fabrice Bellard
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, write to the Free Software
18
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
 */
20
#include <stdarg.h>
21
#include <stdlib.h>
22
#include <stdio.h>
23
#include <string.h>
24
#include <inttypes.h>
25
#include <signal.h>
26
#include <assert.h>
27

    
28
#include "cpu.h"
29
#include "exec-all.h"
30
#include "disas.h"
31
#include "helper.h"
32
#include "tcg-op.h"
33

    
34
#define PREFIX_REPZ   0x01
35
#define PREFIX_REPNZ  0x02
36
#define PREFIX_LOCK   0x04
37
#define PREFIX_DATA   0x08
38
#define PREFIX_ADR    0x10
39

    
40
#ifdef TARGET_X86_64
41
#define X86_64_ONLY(x) x
42
#define X86_64_DEF(x...) x
43
#define CODE64(s) ((s)->code64)
44
#define REX_X(s) ((s)->rex_x)
45
#define REX_B(s) ((s)->rex_b)
46
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
47
#if 1
48
#define BUGGY_64(x) NULL
49
#endif
50
#else
51
#define X86_64_ONLY(x) NULL
52
#define X86_64_DEF(x...)
53
#define CODE64(s) 0
54
#define REX_X(s) 0
55
#define REX_B(s) 0
56
#endif
57

    
58
//#define MACRO_TEST   1
59

    
60
/* global register indexes */
61
static TCGv cpu_env, cpu_T[2], cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst;
62
static TCGv cpu_T3;
63
/* local register indexes (only used inside old micro ops) */
64
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
65
static TCGv cpu_tmp5, cpu_tmp6;
66

    
67
#ifdef TARGET_X86_64
68
static int x86_64_hregs;
69
#endif
70

    
71
/* Per-translation-block decoder state: everything the x86 front end needs
   while decoding one instruction and one translation block. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* segment override register index; -1 if no override */
    int prefix;   /* PREFIX_* bits seen on the current instruction */
    int aflag, dflag; /* effective address size / operand size flags */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* REX.X / REX.B prefix bits */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;    /* current privilege level */
    int iopl;   /* I/O privilege level from EFLAGS */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;      /* CPUID feature bits (EDX) */
    int cpuid_ext_features;  /* CPUID extended feature bits (ECX) */
    int cpuid_ext2_features; /* CPUID 0x80000001 EDX feature bits */
} DisasContext;
107

    
108
static void gen_eob(DisasContext *s);
109
static void gen_jmp(DisasContext *s, target_ulong eip);
110
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
111

    
112
/* i386 arith/logic operations */
113
/* i386 arith/logic operations; order matches the 3-bit /reg encoding of
   the 0x80..0x83 opcode group (ADD=0 ... CMP=7). */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};
123

    
124
/* i386 shift ops */
125
/* i386 shift/rotate ops; order matches the /reg field of the 0xC0/0xC1
   and 0xD0..0xD3 shift opcode group. */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented alias encoding of SHL */
    OP_SAR = 7,
};
135

    
136
/* operand size */
137
/* operand size selector used throughout the translator */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};
143

    
144
/* Operand-register indexes: the eight architectural integer registers in
   encoding order, plus pseudo registers used by the old micro ops. */
enum {
    /* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
159

    
160
/* Emit: T0 = 0. */
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}
164

    
165
/* Emit: T0 = val (sign-extended 32-bit immediate). */
static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}
169

    
170
/* Emit: T0 = val (zero-extended 32-bit immediate). */
static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}
174

    
175
/* Emit: T1 = val (sign-extended 32-bit immediate). */
static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
179

    
180
/* Emit: T1 = val (zero-extended 32-bit immediate). */
static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
184

    
185
/* Emit: A0 (address temporary) = 32-bit immediate val. */
static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
189

    
190
#ifdef TARGET_X86_64
191
static inline void gen_op_movq_A0_im(int64_t val)
192
{
193
    tcg_gen_movi_tl(cpu_A0, val);
194
}
195
#endif
196

    
197
/* Emit: T0 = val (full target-width immediate). */
static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}
201

    
202
/* Emit: T1 = val (full target-width immediate). */
static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
206

    
207
/* Emit: T0 &= 0xffff (truncate to 16 bits). */
static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}
211

    
212
/* Emit: T0 &= val. */
static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}
216

    
217
/* Emit: T0 = T1. */
static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}
221

    
222
/* Emit: A0 &= 0xffff (16-bit addressing wrap). */
static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
226

    
227
#ifdef TARGET_X86_64
228

    
229
#define NB_OP_SIZES 4
230

    
231
#define DEF_REGS(prefix, suffix) \
232
  prefix ## EAX ## suffix,\
233
  prefix ## ECX ## suffix,\
234
  prefix ## EDX ## suffix,\
235
  prefix ## EBX ## suffix,\
236
  prefix ## ESP ## suffix,\
237
  prefix ## EBP ## suffix,\
238
  prefix ## ESI ## suffix,\
239
  prefix ## EDI ## suffix,\
240
  prefix ## R8 ## suffix,\
241
  prefix ## R9 ## suffix,\
242
  prefix ## R10 ## suffix,\
243
  prefix ## R11 ## suffix,\
244
  prefix ## R12 ## suffix,\
245
  prefix ## R13 ## suffix,\
246
  prefix ## R14 ## suffix,\
247
  prefix ## R15 ## suffix,
248

    
249
#else /* !TARGET_X86_64 */
250

    
251
#define NB_OP_SIZES 3
252

    
253
#define DEF_REGS(prefix, suffix) \
254
  prefix ## EAX ## suffix,\
255
  prefix ## ECX ## suffix,\
256
  prefix ## EDX ## suffix,\
257
  prefix ## EBX ## suffix,\
258
  prefix ## ESP ## suffix,\
259
  prefix ## EBP ## suffix,\
260
  prefix ## ESI ## suffix,\
261
  prefix ## EDI ## suffix,
262

    
263
#endif /* !TARGET_X86_64 */
264

    
265
#if defined(WORDS_BIGENDIAN)
266
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
267
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
268
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
269
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
270
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
271
#else
272
#define REG_B_OFFSET 0
273
#define REG_H_OFFSET 1
274
#define REG_W_OFFSET 0
275
#define REG_L_OFFSET 0
276
#define REG_LH_OFFSET 4
277
#endif
278

    
279
/* Store cpu_T[t_index] into guest register "reg" with operand size "ot".
   OT_BYTE: regs 0-3 address the low byte (AL..BL); regs 4-7 address the
   high byte of reg-4 (AH..BH), except that on x86-64 a REX prefix
   (x86_64_hregs) or reg >= 8 forces the low-byte encoding.
   On x86-64, an OT_LONG store also clears the high 32 bits of the
   register, as the architecture requires. */
static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
311

    
312
/* Store T0 into guest register "reg" with operand size "ot". */
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 0, reg);
}
316

    
317
/* Store T1 into guest register "reg" with operand size "ot". */
static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 1, reg);
}
321

    
322
/* Store the A0 address temporary into guest register "reg".
   "size" is an address-size code (0 = 16 bit, 1 = 32 bit, 2 = 64 bit),
   i.e. operand type minus one — not an OT_* value.  As with normal
   register writes, a 32-bit store on x86-64 clears the high half. */
static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
347

    
348
/* Load guest register "reg" into cpu_T[t_index].
   Only the OT_BYTE high-byte case (AH..BH) needs a special load; every
   other size loads the whole target_ulong — callers that care about the
   width extend/truncate afterwards. */
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}
364

    
365
/* Emit: A0 = low 32 bits of guest register "reg", zero-extended. */
static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}
369

    
370
/* Emit: A0 += val, then truncate to 32 bits (32-bit address arithmetic;
   the mask is only needed when target_ulong is 64 bits wide). */
static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
377

    
378
#ifdef TARGET_X86_64
379
static inline void gen_op_addq_A0_im(int64_t val)
380
{
381
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
382
}
383
#endif
384
    
385
static void gen_add_A0_im(DisasContext *s, int val)
386
{
387
#ifdef TARGET_X86_64
388
    if (CODE64(s))
389
        gen_op_addq_A0_im(val);
390
    else
391
#endif
392
        gen_op_addl_A0_im(val);
393
}
394

    
395
/* Emit: T0 += T1. */
static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}
399

    
400
/* Emit: env->eip = T0 (indirect jump target). */
static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}
404

    
405
/* Emit: reg += val with address-size semantics.
   "size": 0 = 16-bit (store only the low word back), 1 = 32-bit
   (truncate to 32 bits on x86-64), 2 = 64-bit. */
static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}
430

    
431
/* Emit: reg += T0 with address-size semantics (same size codes as
   gen_op_add_reg_im; used e.g. to step ESI/EDI by the direction delta). */
static inline void gen_op_add_reg_T0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}
456

    
457
/* Emit: cc_op = val (record which operation last set the flags). */
static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}
461

    
462
/* Emit: A0 += reg << shift, truncated to 32 bits (SIB-style scaled
   index with 32-bit address arithmetic). */
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0) 
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
472

    
473
/* Emit: A0 = low 32 bits of segment "reg"'s base. */
static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}
477

    
478
/* Emit: A0 += segment "reg"'s base, truncated to 32 bits. */
static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
486

    
487
#ifdef TARGET_X86_64
488
static inline void gen_op_movq_A0_seg(int reg)
489
{
490
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
491
}
492

    
493
static inline void gen_op_addq_A0_seg(int reg)
494
{
495
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
496
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
497
}
498

    
499
static inline void gen_op_movq_A0_reg(int reg)
500
{
501
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
502
}
503

    
504
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
505
{
506
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
507
    if (shift != 0) 
508
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
509
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
510
}
511
#endif
512

    
513
/* CMOV dispatch table: [operand size - 1][destination register].
   Index 0 = word, 1 = long, 2 = quad (x86-64 only); DEF_REGS expands to
   one entry per architectural register. */
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_cmovw_, _T1_T0)
    },
    [1] = {
        DEF_REGS(gen_op_cmovl_, _T1_T0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_cmovq_, _T1_T0)
    },
#endif
};
526

    
527
/* Emit a sign-extending load: T0 = *(A0) with size (idx & 3)
   (0 = byte, 1 = word, other = long).  The upper bits of idx encode the
   memory index (CPU mmu mode) used for the access. */
static inline void gen_op_lds_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 2:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
543

    
544
/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
/* Emit a zero-extending load: T0 = *(A0) with size (idx & 3)
   (0 = byte, 1 = word, 2 = long, 3 = quad); upper idx bits select the
   memory index. */
static inline void gen_op_ld_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
564

    
565
/* Unsigned-load alias: gen_op_ld_T0_A0 already zero-extends. */
static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_T0_A0(idx);
}
569

    
570
/* Emit a zero-extending load into T1: same size/mem-index encoding as
   gen_op_ld_T0_A0. */
static inline void gen_op_ld_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}
589

    
590
/* Emit a store: *(A0) = T0 with size (idx & 3); upper idx bits select
   the memory index. */
static inline void gen_op_st_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
609

    
610
/* Emit a store: *(A0) = T1 with size (idx & 3); upper idx bits select
   the memory index. */
static inline void gen_op_st_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}
629

    
630
/* Emit: env->eip = pc (record a known instruction pointer). */
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
635

    
636
/* Compute the source address of a string instruction into A0:
   segment base (default DS, honoring any segment override) + ESI,
   with 64/32/16-bit address-size handling.  In 32-bit mode the segment
   base is only added when needed (override present, or addseg set);
   in 16-bit mode it is always added after wrapping ESI to 16 bits. */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}
670

    
671
/* Compute the destination address of a string instruction into A0:
   ES base + EDI (the ES segment of string destinations cannot be
   overridden), with 64/32/16-bit address-size handling. */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
691

    
692
static inline void gen_op_movl_T0_Dshift(int ot) 
693
{
694
    tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
695
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
696
};
697

    
698
/* Zero-extend "reg" in place according to operand size "ot":
   OT_BYTE/OT_WORD/OT_LONG extend from 8/16/32 bits; any other size
   (OT_QUAD) is already full width and is left untouched. */
static void gen_extu(int ot, TCGv reg)
{
    if (ot == OT_BYTE) {
        tcg_gen_ext8u_tl(reg, reg);
    } else if (ot == OT_WORD) {
        tcg_gen_ext16u_tl(reg, reg);
    } else if (ot == OT_LONG) {
        tcg_gen_ext32u_tl(reg, reg);
    }
    /* OT_QUAD and anything else: nothing to do */
}
714

    
715
/* Sign-extend "reg" in place according to operand size "ot":
   OT_BYTE/OT_WORD/OT_LONG extend from 8/16/32 bits; any other size
   (OT_QUAD) is already full width and is left untouched. */
static void gen_exts(int ot, TCGv reg)
{
    if (ot == OT_BYTE) {
        tcg_gen_ext8s_tl(reg, reg);
    } else if (ot == OT_WORD) {
        tcg_gen_ext16s_tl(reg, reg);
    } else if (ot == OT_LONG) {
        tcg_gen_ext32s_tl(reg, reg);
    }
    /* OT_QUAD and anything else: nothing to do */
}
731

    
732
/* Emit a branch to label1 if (E)CX != 0, masked to the address size
   ("size" is the address-size code, so size+1 is the OT_* width). */
static inline void gen_op_jnz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcond_tl(TCG_COND_NE, cpu_tmp0, tcg_const_tl(0), label1);
}
738

    
739
/* Emit a branch to label1 if (E)CX == 0, masked to the address size. */
static inline void gen_op_jz_ecx(int size, int label1)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), label1);
}
745

    
746
/* Branch helpers for REPZ/REPNZ termination, indexed [nz][operand size]:
   row 0 branches while ZF says "not equal", row 1 while "equal". */
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};
760

    
761
/* I/O-port read helpers indexed by operand size (byte/word/long). */
static void *helper_in_func[3] = {
    helper_inb,
    helper_inw,
    helper_inl,
};
766

    
767
/* I/O-port write helpers indexed by operand size (byte/word/long). */
static void *helper_out_func[3] = {
    helper_outb,
    helper_outw,
    helper_outl,
};
772

    
773
/* I/O permission-check helpers indexed by operand size. */
static void *gen_check_io_func[3] = {
    helper_check_iob,
    helper_check_iow,
    helper_check_iol,
};
778

    
779
/* Emit the permission checks needed before an I/O instruction whose port
   is in T0.  Two independent checks may be emitted:
   1. the protected-mode/vm86 check (when CPL > IOPL or in vm86 mode),
      via gen_check_io_func[ot];
   2. the SVM IOIO intercept check when INTERCEPT_IOIO_PROT is active.
   Before the first check, CPU state (cc_op, eip) is flushed so a helper
   exception unwinds to a consistent state; state_saved ensures this is
   done only once. */
static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
                           cpu_tmp2_i32);
    }
    if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
        if (!state_saved) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(cur_eip);
            state_saved = 1;
        }
        /* encode the access size into the SVM exit-info flags */
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           cpu_tmp2_i32,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));
    }
}
811

    
812
/* Emit one MOVS iteration: load from [seg:ESI], store to [ES:EDI],
   then step ESI and EDI by the direction-flag delta. */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
822

    
823
/* Flush the statically-known cc_op into the cc_op register and mark the
   flags state dynamic (needed before any code that may inspect flags at
   run time, e.g. the REP loops). */
static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}
830

    
831
/* Record a one-operand flag result: cc_dst = T0, cc_src unused. */
static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
836

    
837
/* Record a two-operand flag result: cc_src = T1, cc_dst = T0. */
static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
842

    
843
/* Emit CMP flag state: cc_src = T1, cc_dst = T0 - T1 (T0 unchanged). */
static inline void gen_op_cmpl_T0_T1_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}
848

    
849
/* Emit TEST flag state: cc_dst = T0 & T1, cc_src unused. */
static inline void gen_op_testl_T0_T1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}
854

    
855
/* Emit NEG flag state: cc_src = -T0 (the original operand, negated so
   the SUB flag formulas apply), cc_dst = T0 (the result). */
static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
860

    
861
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
/* Emit the (E)CX == 0 early-exit of a REP loop: if ECX is zero, jump to
   next_eip; otherwise fall through.  Returns the label (l2) that the
   caller's loop body can branch to in order to terminate the loop. */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx(s->aflag, l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
875

    
876
/* Emit one STOS iteration: store (E)AX to [ES:EDI], then step EDI. */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
884

    
885
/* Emit one LODS iteration: load [seg:ESI] into (E)AX, then step ESI. */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}
893

    
894
/* Emit one SCAS iteration: compare (E)AX with [ES:EDI] (setting the
   flag state), then step EDI. */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
903

    
904
/* Emit one CMPS iteration: compare [seg:ESI] with [ES:EDI] (setting the
   flag state), then step ESI and EDI. */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
915

    
916
/* Emit one INS iteration: read the port in DX and store the value to
   [ES:EDI], then step EDI. */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    /* port number is only 16 bits */
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
931

    
932
/* Emit one OUTS iteration: load [seg:ESI] and write it to the port in
   DX, then step ESI. */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    /* port number is only 16 bits */
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}
946

    
947
/* same method as Valgrind : we generate jumps to current or next
948
   instruction */
949
/* same method as Valgrind : we generate jumps to current or next
   instruction */
/* Expand to a gen_repz_<op>() that wraps one gen_<op>() iteration in the
   REP loop skeleton: exit when ECX == 0, run the body, decrement ECX,
   then jump back to the instruction itself (cur_eip) so single-stepping
   and interrupts see each iteration. */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
964

    
965
/* Like GEN_REPZ, but for ops that also terminate on the ZF condition
   (SCAS/CMPS): "nz" selects REPZ vs REPNZ, and the per-size jz/jnz
   branch exits the loop when the condition fails. */
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);\
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
982

    
983
/* Instantiate the REP wrappers for each string instruction. */
GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
990

    
991
/* Jcc condition codes; order matches the low 3 bits of the x86
   condition encoding (the 4th bit negates the condition). */
enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};
1001

    
1002
/* Fast conditional-jump helpers usable when the flags come from a SUB,
   indexed [operand size][JCC_* condition].  NULL entries (JCC_O, JCC_P)
   have no fast path and fall back to the slow flag computation; some
   64-bit entries are disabled via BUGGY_64. */
static GenOpFunc1 *gen_jcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_jb_subb,
        gen_op_jz_subb,
        gen_op_jbe_subb,
        gen_op_js_subb,
        NULL,
        gen_op_jl_subb,
        gen_op_jle_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_jb_subw,
        gen_op_jz_subw,
        gen_op_jbe_subw,
        gen_op_js_subw,
        NULL,
        gen_op_jl_subw,
        gen_op_jle_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_jb_subl,
        gen_op_jz_subl,
        gen_op_jbe_subl,
        gen_op_js_subl,
        NULL,
        gen_op_jl_subl,
        gen_op_jle_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        BUGGY_64(gen_op_jb_subq),
        gen_op_jz_subq,
        BUGGY_64(gen_op_jbe_subq),
        gen_op_js_subq,
        NULL,
        BUGGY_64(gen_op_jl_subq),
        BUGGY_64(gen_op_jle_subq),
    },
#endif
};
1046

    
1047
/* Slow SETcc generators indexed by JCC_* kind: each computes the
   condition into T0 from the current flag state (the "_cc" variants
   work for any cc_op, hence "slow"). */
static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};
/* Fast SETcc generators indexed by [operand size][JCC_* kind], valid when
   flags come from a subtract.  NULL slots (JCC_O, JCC_P) fall back to
   gen_setcc_slow.  Unlike gen_jcc_sub, all quad entries are usable. */
static GenOpFunc *gen_setcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        NULL,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        NULL,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        NULL,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        NULL,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,
    },
#endif
};
/* FPU arithmetic helpers ST0 <- ST0 op FT0, indexed by the 3-bit FP
   operation field -- presumably the reg field of the ESC opcodes; TODO
   confirm against the disas_insn FP decoding.  The compare slot appears
   twice (FCOM/FCOMP share the computation). */
static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,
};
/* NOTE the exception in "r" op ordering: relative to the ST0 table above,
   sub/subr and div/divr are swapped (x86 FSUB ST(i),ST0 semantics), and
   the two compare slots are NULL (no STN-destination compare). */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    NULL,
    NULL,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
};
/* compute eflags.C to reg */
static void gen_compute_eflags_c(TCGv reg)
{
    /* Index cc_table[] by the runtime cc_op value and emit an indirect
       call to that entry's compute_c() helper; the helper's 32-bit result
       is zero-extended into reg.  The shift amount (3 on 32-bit hosts,
       4 on 64-bit) is log2(sizeof(CCTable)) -- presumably two function
       pointers per entry; TODO confirm against the CCTable definition.
       TCG_CALL_PURE: the helper only reads CPU flag state. */
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE, 
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64, 
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE, 
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
/* compute all eflags to cc_src */
static void gen_compute_eflags(TCGv reg)
{
    /* Same indirect-call scheme as gen_compute_eflags_c, but through the
       compute_all() slot of the CCTable entry, materialising the full
       eflags value into reg. */
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE, 
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64, 
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE, 
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
/* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit one ALU group operation (ADD/OR/ADC/SBB/AND/SUB/XOR/CMP) of
   T1 into register d or into the memory operand at A0, updating the
   lazy condition-code state in s1->cc_op accordingly. */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    /* load the destination operand into T0 */
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        /* ADC consumes the incoming carry: flush the old cc_op so the
           carry can be computed, then fold the carry (0/1 in tmp4) into
           the runtime cc_op as CC_OP_ADDB/ADCB selection (carry * 4). */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_SBBL:
        /* SBB: same scheme as ADC, with subtraction and CC_OP_SUBB base. */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_ADDB + ot;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    default:
        /* unknown ops are treated as AND (default falls into OP_ANDL) */
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_CMPL:
        /* CMP writes no destination, only the flag state */
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    }
}
/* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit INC (c > 0) or DEC (c <= 0) of register d or the memory operand
   at A0.  INC/DEC preserve CF, so the *previous* operation's carry is
   captured into cc_src (the old cc_op is flushed first so the runtime
   cc_op still describes the previous op when the carry is computed). */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    /* inherited carry goes to cc_src, result to cc_dst */
    gen_compute_eflags_c(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
/* XXX: add faster immediate case */
/* Emit a variable-count shift (SHL/SHR/SAR selected by is_right/is_arith)
   of register op1 (or the memory operand at A0 when op1 == OR_TMP0) by
   the count in T1.  T3 receives the value shifted by count-1, from which
   the flag helpers can later derive CF.  Flags are only updated when the
   masked count is non-zero, hence the runtime branch and the
   CC_OP_DYNAMIC result state. */
static void gen_shift_rm_T1(DisasContext *s, int ot, int op1, 
                            int is_right, int is_arith)
{
    target_ulong mask;
    int shift_label;

    /* hardware masks the count to 6 bits for 64-bit ops, 5 otherwise */
    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    /* tmp5 = count - 1, used to expose the last bit shifted out */
    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);

    if (is_right) {
        if (is_arith) {
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    shift_label = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), shift_label);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);

    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
/* Signed-count immediate shift: ret = arg1 << arg2 for non-negative
   arg2, ret = arg1 >> -arg2 (logical) for negative arg2. */
static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
{
    if (arg2 < 0)
        tcg_gen_shri_tl(ret, arg1, -arg2);
    else
        tcg_gen_shli_tl(ret, arg1, arg2);
}
/* XXX: add faster immediate case */
/* Emit ROL/ROR (selected by is_right) of register op1 (or the memory
   operand at A0 when op1 == OR_TMP0) by the count in T1.  The rotate is
   built from two opposing shifts OR-ed together.  Both the rotate and the
   flag update are skipped at runtime when the masked count is zero (two
   separate branches around label1/label2); the resulting OF/CF are merged
   into a full eflags value and the state becomes CC_OP_EFLAGS. */
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1, 
                          int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label1);

    /* reduce the count modulo the operand width for sub-word sizes */
    if (ot <= OT_WORD)
        tcg_gen_andi_tl(cpu_tmp0, cpu_T[1], (1 << (3 + ot)) - 1);
    else
        tcg_gen_mov_tl(cpu_tmp0, cpu_T[1]);

    gen_extu(ot, cpu_T[0]);
    tcg_gen_mov_tl(cpu_T3, cpu_T[0]);   /* keep the pre-rotate value for OF */

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
    if (is_right) {
        tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    } else {
        tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    }
    tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label2);

    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    /* OF = MSB of (old ^ new), shifted into the CC_O bit position */
    tcg_gen_xor_tl(cpu_tmp0, cpu_T3, cpu_T[0]);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    /* CF = low bit of the result for ROR (after shifting the top bit
       down), or of the result directly for ROL */
    if (is_right) {
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], data_bits - 1);
    }
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);

    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
/* Rotate-through-carry helpers: entries [0..3] are RCL for
   byte/word/long/quad, [4..7] are RCR; indexed as ot + is_right * 4
   (see gen_rotc_rm_T1).  Quad entries are NULL on non-x86_64 targets
   (X86_64_ONLY). */
static void *helper_rotc[8] = {
    helper_rclb,
    helper_rclw,
    helper_rcll,
    X86_64_ONLY(helper_rclq),
    helper_rcrb,
    helper_rcrw,
    helper_rcrl,
    X86_64_ONLY(helper_rcrq),
};
/* XXX: add faster immediate = 1 case */
/* Emit RCL/RCR (selected by is_right) of register op1 (or the memory
   operand at A0 when op1 == OR_TMP0) by the count in T1, via the helper_rotc
   table.  Flags are updated only when T3 != -1 after the call -- presumably
   the helpers leave the new eflags in T3, or -1 when the count was zero;
   TODO confirm against the rcl/rcr helper implementations. */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1, 
                           int is_right)
{
    int label1;

    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
                       cpu_T[0], cpu_T[0], cpu_T[1]);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(-1), label1);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label1);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
/* XXX: add faster immediate case */
/* Emit SHLD/SHRD (selected by is_right): a double-precision shift of
   register op1 (or the memory operand at A0 when op1 == OR_TMP0), with
   T1 supplying the bits shifted in and T3 the count.  The 16-bit case
   concatenates the two operands into one 32-bit value first (Intel
   behaviour for counts > 16).  The whole operation and the flag update
   are skipped at runtime when the masked count is zero. */
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1, 
                                int is_right)
{
    int label1, label2, data_bits;
    target_ulong mask;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);

    /* tmp5 = count - 1: exposes the last bit shifted out, for CF */
    tcg_gen_addi_tl(cpu_tmp5, cpu_T3, -1);
    if (ot == OT_WORD) {
        /* Note: we implement the Intel behaviour for shift count > 16 */
        if (is_right) {
            /* build T1:T0 as one 32-bit quantity, shift it right */
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
            tcg_gen_shli_tl(cpu_tmp0, cpu_T[1], 16);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);

            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);

            /* only needed if count > 16, but a test would complicate */
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
            tcg_gen_shl_tl(cpu_tmp0, cpu_T[0], cpu_tmp5);

            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);

            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        } else {
            /* XXX: not optimal */
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
            tcg_gen_shli_tl(cpu_T[1], cpu_T[1], 16);
            tcg_gen_or_tl(cpu_T[1], cpu_T[1], cpu_T[0]);
            tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);

            tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
            tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
            tcg_gen_shr_tl(cpu_tmp6, cpu_T[1], cpu_tmp0);
            tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);

            tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
            tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        data_bits = 8 << ot;
        if (is_right) {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);

            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);

            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
            tcg_gen_shl_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);

        } else {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);

            tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);

            tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
            tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    }
    tcg_gen_mov_tl(cpu_T[1], cpu_tmp4);   /* T1 = value shifted by count-1 */

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label2);

    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
    if (is_right) {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    } else {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
    }
    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
/* Dispatch a shift/rotate group operation (OP_ROL..OP_SAR) on operand d
   with the count taken from register s (loaded into T1 first unless the
   count is already in OR_TMP1). */
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);
    switch(op) {
    case OP_ROL:
    case OP_ROR:
        /* plain rotates: direction encoded as is_right */
        gen_rot_rm_T1(s1, ot, d, op == OP_ROR);
        break;
    case OP_RCL:
    case OP_RCR:
        /* rotates through carry */
        gen_rotc_rm_T1(s1, ot, d, op == OP_RCR);
        break;
    case OP_SHL:
    case OP_SHL1:
        gen_shift_rm_T1(s1, ot, d, 0, 0);
        break;
    case OP_SHR:
        gen_shift_rm_T1(s1, ot, d, 1, 0);
        break;
    case OP_SAR:
        gen_shift_rm_T1(s1, ot, d, 1, 1);
        break;
    }
}
/* Immediate-count shift/rotate: loads the constant count c into T1 and
   reuses the variable-count dispatch path. */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
/* Decode the ModR/M (and optional SIB/displacement) memory addressing
   bytes at s->pc and emit code computing the effective address into A0,
   including any segment-base addition.  On return *reg_ptr is always
   OR_A0 and *offset_ptr is always 0 (the displacement has already been
   folded into A0).  s->aflag selects 16-bit vs 32/64-bit addressing. */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;       /* explicit segment prefix always applies */
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            /* rm == 4 selects a SIB byte */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base register: disp32 follows (RIP-relative in
                   64-bit mode when there is no SIB byte) */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0) {
                    gen_op_addq_A0_im(disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* default segment: SS for EBP/ESP-based, DS otherwise */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        /* 16-bit addressing */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* absolute disp16, no registers */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        /* the eight 16-bit register combinations */
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();   /* 16-bit wrap-around */
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* BP-based forms default to SS */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
/* Skip over the addressing bytes (SIB and displacement) of a ModR/M form
   without generating any code -- used for multi-byte NOP-style opcodes.
   Advances s->pc exactly as gen_lea_modrm would. */
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
    if (mod == 3)
        return;                 /* register operand: nothing follows */
    rm = modrm & 7;

    if (!s->aflag) {
        /* 16-bit addressing */
        if (mod == 1)
            s->pc++;            /* disp8 */
        else if (mod == 2 || rm == 6)
            s->pc += 2;         /* disp16 */
        return;
    }

    /* 32/64-bit addressing */
    base = rm;
    if (base == 4) {
        code = ldub_code(s->pc++);   /* SIB byte */
        base = (code & 7);
    }
    if (mod == 1)
        s->pc++;                /* disp8 */
    else if (mod == 2 || base == 5)
        s->pc += 4;             /* disp32 */
}
/* used for LEA and MOV AX, mem */
1868
static void gen_add_A0_ds_seg(DisasContext *s)
1869
{
1870
    int override, must_add_seg;
1871
    must_add_seg = s->addseg;
1872
    override = R_DS;
1873
    if (s->override >= 0) {
1874
        override = s->override;
1875
        must_add_seg = 1;
1876
    } else {
1877
        override = R_DS;
1878
    }
1879
    if (must_add_seg) {
1880
#ifdef TARGET_X86_64
1881
        if (CODE64(s)) {
1882
            gen_op_addq_A0_seg(override);
1883
        } else
1884
#endif
1885
        {
1886
            gen_op_addl_A0_seg(override);
1887
        }
1888
    }
1889
}
1890

    
1891
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1892
   OR_TMP0 */
1893
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1894
{
1895
    int mod, rm, opreg, disp;
1896

    
1897
    mod = (modrm >> 6) & 3;
1898
    rm = (modrm & 7) | REX_B(s);
1899
    if (mod == 3) {
1900
        if (is_store) {
1901
            if (reg != OR_TMP0)
1902
                gen_op_mov_TN_reg(ot, 0, reg);
1903
            gen_op_mov_reg_T0(ot, rm);
1904
        } else {
1905
            gen_op_mov_TN_reg(ot, 0, rm);
1906
            if (reg != OR_TMP0)
1907
                gen_op_mov_reg_T0(ot, reg);
1908
        }
1909
    } else {
1910
        gen_lea_modrm(s, modrm, &opreg, &disp);
1911
        if (is_store) {
1912
            if (reg != OR_TMP0)
1913
                gen_op_mov_TN_reg(ot, 0, reg);
1914
            gen_op_st_T0_A0(ot + s->mem_index);
1915
        } else {
1916
            gen_op_ld_T0_A0(ot + s->mem_index);
1917
            if (reg != OR_TMP0)
1918
                gen_op_mov_reg_T0(ot, reg);
1919
        }
1920
    }
1921
}
1922

    
1923
/* Fetch an immediate of size ot from the instruction stream and advance
   s->pc past it.  Sizes other than byte/word are read as 32 bits. */
static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t val;

    if (ot == OT_BYTE) {
        val = ldub_code(s->pc);
        s->pc += 1;
    } else if (ot == OT_WORD) {
        val = lduw_code(s->pc);
        s->pc += 2;
    } else {
        /* OT_LONG and any other size */
        val = ldl_code(s->pc);
        s->pc += 4;
    }
    return val;
}
static inline int insn_const_size(unsigned int ot)
1946
{
1947
    if (ot <= OT_LONG)
1948
        return 1 << ot;
1949
    else
1950
        return 4;
1951
}
1952

    
1953
/* Emit a jump to eip, chaining directly to the next translation block
   (slot tb_num) when the target lies on one of the pages this TB already
   occupies; otherwise fall back to an unchained end-of-block exit. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
/* Emit a conditional jump for condition code 'b' (low bit inverts the
   condition).  'val' is the taken target, 'next_eip' the fall-through.
   When TB chaining is allowed (s->jmp_opt) both edges use gen_goto_tb;
   otherwise EIP is stored explicitly and the block ends.
   Fix vs. original: removed the dead local 'tb' (assigned from s->tb
   but never read) and merged the duplicated JCC_Z/JCC_S case bodies. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            switch(jcc_op) {
            case JCC_Z:
            case JCC_S:
                /* Z and S only depend on the result, so the SUB jump
                   helpers of matching width can be reused.  % 4 maps
                   any of the cc_op groups above to its operand size. */
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            /* no fast path: materialize the condition into T0 */
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {
        /* slow path: compute the condition, store EIP explicitly */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
2102

    
2103
/* Emit code that sets T0 to 0/1 according to condition code 'b'
   (low bit of b inverts the result), using a fast table lookup for
   the common cmp/setcc patterns and falling back to the slow helper. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/setcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some conditions are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        switch(jcc_op) {
        case JCC_Z:
        case JCC_S:
            /* Z and S depend only on the result; reuse the SUB setcc
               helper of the same operand size (% 4 extracts it). */
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}
2169

    
2170
/* move T0 to seg_reg and compute if the CPU state may change. Never
2171
   call this function with seg_reg == R_CS */
2172
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2173
{
2174
    if (s->pe && !s->vm86) {
2175
        /* XXX: optimize by finding processor state dynamically */
2176
        if (s->cc_op != CC_OP_DYNAMIC)
2177
            gen_op_set_cc_op(s->cc_op);
2178
        gen_jmp_im(cur_eip);
2179
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2180
        tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
2181
        /* abort translation because the addseg value may change or
2182
           because ss32 may change. For R_SS, translation must always
2183
           stop as a special handling must be done to disable hardware
2184
           interrupts for the next instruction */
2185
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
2186
            s->is_jmp = 3;
2187
    } else {
2188
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
2189
        if (seg_reg == R_SS)
2190
            s->is_jmp = 3;
2191
    }
2192
}
2193

    
2194
static inline int svm_is_rep(int prefixes)
2195
{
2196
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2197
}
2198

    
2199
static inline int
2200
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2201
                              uint32_t type, uint64_t param)
2202
{
2203
    if(!(s->flags & (INTERCEPT_SVM_MASK)))
2204
        /* no SVM activated */
2205
        return 0;
2206
    switch(type) {
2207
        /* CRx and DRx reads/writes */
2208
        case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
2209
            if (s->cc_op != CC_OP_DYNAMIC) {
2210
                gen_op_set_cc_op(s->cc_op);
2211
            }
2212
            gen_jmp_im(pc_start - s->cs_base);
2213
            tcg_gen_helper_0_2(helper_svm_check_intercept_param, 
2214
                               tcg_const_i32(type), tcg_const_i64(param));
2215
            /* this is a special case as we do not know if the interception occurs
2216
               so we assume there was none */
2217
            return 0;
2218
        case SVM_EXIT_MSR:
2219
            if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
2220
                if (s->cc_op != CC_OP_DYNAMIC) {
2221
                    gen_op_set_cc_op(s->cc_op);
2222
                }
2223
                gen_jmp_im(pc_start - s->cs_base);
2224
                tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2225
                                   tcg_const_i32(type), tcg_const_i64(param));
2226
                /* this is a special case as we do not know if the interception occurs
2227
                   so we assume there was none */
2228
                return 0;
2229
            }
2230
            break;
2231
        default:
2232
            if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
2233
                if (s->cc_op != CC_OP_DYNAMIC) {
2234
                    gen_op_set_cc_op(s->cc_op);
2235
                }
2236
                gen_jmp_im(pc_start - s->cs_base);
2237
                tcg_gen_helper_0_2(helper_vmexit,
2238
                                   tcg_const_i32(type), tcg_const_i64(param));
2239
                /* we can optimize this one so TBs don't get longer
2240
                   than up to vmexit */
2241
                gen_eob(s);
2242
                return 1;
2243
            }
2244
    }
2245
    return 0;
2246
}
2247

    
2248
static inline int
2249
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2250
{
2251
    return gen_svm_check_intercept_param(s, pc_start, type, 0);
2252
}
2253

    
2254
/* Add 'addend' to ESP, using the stack width implied by the current
   mode: 64-bit in long mode, otherwise 32- or 16-bit per ss32. */
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_add_reg_im(2, R_ESP, addend);
        return;
    }
#endif
    if (s->ss32) {
        gen_op_add_reg_im(1, R_ESP, addend);
    } else {
        gen_op_add_reg_im(0, R_ESP, addend);
    }
}
2267

    
2268
/* generate a push. It depends on ss32, addseg and dflag */
2269
static void gen_push_T0(DisasContext *s)
2270
{
2271
#ifdef TARGET_X86_64
2272
    if (CODE64(s)) {
2273
        gen_op_movq_A0_reg(R_ESP);
2274
        if (s->dflag) {
2275
            gen_op_addq_A0_im(-8);
2276
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2277
        } else {
2278
            gen_op_addq_A0_im(-2);
2279
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
2280
        }
2281
        gen_op_mov_reg_A0(2, R_ESP);
2282
    } else
2283
#endif
2284
    {
2285
        gen_op_movl_A0_reg(R_ESP);
2286
        if (!s->dflag)
2287
            gen_op_addl_A0_im(-2);
2288
        else
2289
            gen_op_addl_A0_im(-4);
2290
        if (s->ss32) {
2291
            if (s->addseg) {
2292
                tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2293
                gen_op_addl_A0_seg(R_SS);
2294
            }
2295
        } else {
2296
            gen_op_andl_A0_ffff();
2297
            tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2298
            gen_op_addl_A0_seg(R_SS);
2299
        }
2300
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2301
        if (s->ss32 && !s->addseg)
2302
            gen_op_mov_reg_A0(1, R_ESP);
2303
        else
2304
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2305
    }
2306
}
2307

    
2308
/* generate a push. It depends on ss32, addseg and dflag */
2309
/* slower version for T1, only used for call Ev */
2310
static void gen_push_T1(DisasContext *s)
2311
{
2312
#ifdef TARGET_X86_64
2313
    if (CODE64(s)) {
2314
        gen_op_movq_A0_reg(R_ESP);
2315
        if (s->dflag) {
2316
            gen_op_addq_A0_im(-8);
2317
            gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2318
        } else {
2319
            gen_op_addq_A0_im(-2);
2320
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
2321
        }
2322
        gen_op_mov_reg_A0(2, R_ESP);
2323
    } else
2324
#endif
2325
    {
2326
        gen_op_movl_A0_reg(R_ESP);
2327
        if (!s->dflag)
2328
            gen_op_addl_A0_im(-2);
2329
        else
2330
            gen_op_addl_A0_im(-4);
2331
        if (s->ss32) {
2332
            if (s->addseg) {
2333
                gen_op_addl_A0_seg(R_SS);
2334
            }
2335
        } else {
2336
            gen_op_andl_A0_ffff();
2337
            gen_op_addl_A0_seg(R_SS);
2338
        }
2339
        gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2340

    
2341
        if (s->ss32 && !s->addseg)
2342
            gen_op_mov_reg_A0(1, R_ESP);
2343
        else
2344
            gen_stack_update(s, (-2) << s->dflag);
2345
    }
2346
}
2347

    
2348
/* two step pop is necessary for precise exceptions */
2349
static void gen_pop_T0(DisasContext *s)
2350
{
2351
#ifdef TARGET_X86_64
2352
    if (CODE64(s)) {
2353
        gen_op_movq_A0_reg(R_ESP);
2354
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2355
    } else
2356
#endif
2357
    {
2358
        gen_op_movl_A0_reg(R_ESP);
2359
        if (s->ss32) {
2360
            if (s->addseg)
2361
                gen_op_addl_A0_seg(R_SS);
2362
        } else {
2363
            gen_op_andl_A0_ffff();
2364
            gen_op_addl_A0_seg(R_SS);
2365
        }
2366
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2367
    }
2368
}
2369

    
2370
/* Second half of a pop: bump ESP by the operand size (8 in long mode
   with a 64-bit operand, else 2 or 4 per dflag). */
static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
        return;
    }
#endif
    gen_stack_update(s, 2 << s->dflag);
}
2381

    
2382
/* Load A0 with the (segmented) top-of-stack address; the unsegmented
   ESP value is left in T1 for a later write-back. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
2391

    
2392
/* NOTE: wrap around in 16 bit not fully handled */
2393
static void gen_pusha(DisasContext *s)
2394
{
2395
    int i;
2396
    gen_op_movl_A0_reg(R_ESP);
2397
    gen_op_addl_A0_im(-16 <<  s->dflag);
2398
    if (!s->ss32)
2399
        gen_op_andl_A0_ffff();
2400
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2401
    if (s->addseg)
2402
        gen_op_addl_A0_seg(R_SS);
2403
    for(i = 0;i < 8; i++) {
2404
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2405
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2406
        gen_op_addl_A0_im(2 <<  s->dflag);
2407
    }
2408
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2409
}
2410

    
2411
/* NOTE: wrap around in 16 bit not fully handled */
2412
static void gen_popa(DisasContext *s)
2413
{
2414
    int i;
2415
    gen_op_movl_A0_reg(R_ESP);
2416
    if (!s->ss32)
2417
        gen_op_andl_A0_ffff();
2418
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2419
    tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 <<  s->dflag);
2420
    if (s->addseg)
2421
        gen_op_addl_A0_seg(R_SS);
2422
    for(i = 0;i < 8; i++) {
2423
        /* ESP is not reloaded */
2424
        if (i != 3) {
2425
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2426
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2427
        }
2428
        gen_op_addl_A0_im(2 <<  s->dflag);
2429
    }
2430
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2431
}
2432

    
2433
/* ENTER: push EBP, optionally copy 'level' frame pointers via a
   helper, set EBP to the new frame and reserve 'esp_addend' bytes. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter64_level,
                               tcg_const_i32(level),
                               tcg_const_i32((ot == OT_QUAD)),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter_level,
                               tcg_const_i32(level),
                               tcg_const_i32(s->dflag),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
2488

    
2489
/* Raise exception 'trapno' at guest address 'cur_eip': flush lazy
   condition codes, store EIP, call the raise helper and end the TB. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
    s->is_jmp = 3;
}
2497

    
2498
/* an interrupt is different from an exception because of the
2499
   privilege checks */
2500
static void gen_interrupt(DisasContext *s, int intno,
2501
                          target_ulong cur_eip, target_ulong next_eip)
2502
{
2503
    if (s->cc_op != CC_OP_DYNAMIC)
2504
        gen_op_set_cc_op(s->cc_op);
2505
    gen_jmp_im(cur_eip);
2506
    tcg_gen_helper_0_2(helper_raise_interrupt, 
2507
                       tcg_const_i32(intno), 
2508
                       tcg_const_i32(next_eip - cur_eip));
2509
    s->is_jmp = 3;
2510
}
2511

    
2512
/* Stop at 'cur_eip' and drop into the debugger: flush condition
   codes, store EIP, call helper_debug and end the TB. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_0(helper_debug);
    s->is_jmp = 3;
}
2520

    
2521
/* generate a generic end of block. Trace exception is also generated
2522
   if needed */
2523
static void gen_eob(DisasContext *s)
2524
{
2525
    if (s->cc_op != CC_OP_DYNAMIC)
2526
        gen_op_set_cc_op(s->cc_op);
2527
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
2528
        tcg_gen_helper_0_0(helper_reset_inhibit_irq);
2529
    }
2530
    if (s->singlestep_enabled) {
2531
        tcg_gen_helper_0_0(helper_debug);
2532
    } else if (s->tf) {
2533
        tcg_gen_helper_0_0(helper_single_step);
2534
    } else {
2535
        tcg_gen_exit_tb(0);
2536
    }
2537
    s->is_jmp = 3;
2538
}
2539

    
2540
/* generate a jump to eip. No segment change must happen before as a
2541
   direct call to the next block may occur */
2542
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
2543
{
2544
    if (s->jmp_opt) {
2545
        if (s->cc_op != CC_OP_DYNAMIC) {
2546
            gen_op_set_cc_op(s->cc_op);
2547
            s->cc_op = CC_OP_DYNAMIC;
2548
        }
2549
        gen_goto_tb(s, tb_num, eip);
2550
        s->is_jmp = 3;
2551
    } else {
2552
        gen_jmp_im(eip);
2553
        gen_eob(s);
2554
    }
2555
}
2556

    
2557
/* Jump to 'eip' using TB chain slot 0. */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2561

    
2562
static inline void gen_ldq_env_A0(int idx, int offset)
2563
{
2564
    int mem_index = (idx >> 2) - 1;
2565
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2566
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
2567
}
2568

    
2569
static inline void gen_stq_env_A0(int idx, int offset)
2570
{
2571
    int mem_index = (idx >> 2) - 1;
2572
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
2573
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
2574
}
2575

    
2576
static inline void gen_ldo_env_A0(int idx, int offset)
2577
{
2578
    int mem_index = (idx >> 2) - 1;
2579
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
2580
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2581
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2582
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
2583
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2584
}
2585

    
2586
static inline void gen_sto_env_A0(int idx, int offset)
2587
{
2588
    int mem_index = (idx >> 2) - 1;
2589
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
2590
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
2591
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
2592
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
2593
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
2594
}
2595

    
2596
static inline void gen_op_movo(int d_offset, int s_offset)
2597
{
2598
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2599
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2600
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
2601
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
2602
}
2603

    
2604
static inline void gen_op_movq(int d_offset, int s_offset)
2605
{
2606
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
2607
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2608
}
2609

    
2610
static inline void gen_op_movl(int d_offset, int s_offset)
2611
{
2612
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
2613
    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
2614
}
2615

    
2616
static inline void gen_op_movq_env_0(int d_offset)
2617
{
2618
    tcg_gen_movi_i64(cpu_tmp1_i64, 0);
2619
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
2620
}
2621

    
2622
/* Sentinel values used in the SSE dispatch tables below instead of a
   real helper pointer. */
#define SSE_SPECIAL ((void *)1)   /* handled by dedicated code in gen_sse() */
#define SSE_DUMMY ((void *)2)     /* valid opcode, no table-driven action */

/* { mmx handler, xmm handler } pair */
#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
/* the four FP variants: packed single, packed double, scalar single,
   scalar double */
#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
                     helper_ ## x ## ss, helper_ ## x ## sd, }
2628

    
2629
static void *sse_op_table1[256][4] = {
2630
    /* 3DNow! extensions */
2631
    [0x0e] = { SSE_DUMMY }, /* femms */
2632
    [0x0f] = { SSE_DUMMY }, /* pf... */
2633
    /* pure SSE operations */
2634
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2635
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2636
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
2637
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
2638
    [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
2639
    [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
2640
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
2641
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */
2642

    
2643
    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
2644
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
2645
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2646
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
2647
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2648
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2649
    [0x2e] = { helper_ucomiss, helper_ucomisd },
2650
    [0x2f] = { helper_comiss, helper_comisd },
2651
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2652
    [0x51] = SSE_FOP(sqrt),
2653
    [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
2654
    [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
2655
    [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
2656
    [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
2657
    [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
2658
    [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
2659
    [0x58] = SSE_FOP(add),
2660
    [0x59] = SSE_FOP(mul),
2661
    [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
2662
               helper_cvtss2sd, helper_cvtsd2ss },
2663
    [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
2664
    [0x5c] = SSE_FOP(sub),
2665
    [0x5d] = SSE_FOP(min),
2666
    [0x5e] = SSE_FOP(div),
2667
    [0x5f] = SSE_FOP(max),
2668

    
2669
    [0xc2] = SSE_FOP(cmpeq),
2670
    [0xc6] = { helper_shufps, helper_shufpd },
2671

    
2672
    /* MMX ops and their SSE extensions */
2673
    [0x60] = MMX_OP2(punpcklbw),
2674
    [0x61] = MMX_OP2(punpcklwd),
2675
    [0x62] = MMX_OP2(punpckldq),
2676
    [0x63] = MMX_OP2(packsswb),
2677
    [0x64] = MMX_OP2(pcmpgtb),
2678
    [0x65] = MMX_OP2(pcmpgtw),
2679
    [0x66] = MMX_OP2(pcmpgtl),
2680
    [0x67] = MMX_OP2(packuswb),
2681
    [0x68] = MMX_OP2(punpckhbw),
2682
    [0x69] = MMX_OP2(punpckhwd),
2683
    [0x6a] = MMX_OP2(punpckhdq),
2684
    [0x6b] = MMX_OP2(packssdw),
2685
    [0x6c] = { NULL, helper_punpcklqdq_xmm },
2686
    [0x6d] = { NULL, helper_punpckhqdq_xmm },
2687
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2688
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movqdu */
2689
    [0x70] = { helper_pshufw_mmx,
2690
               helper_pshufd_xmm,
2691
               helper_pshufhw_xmm,
2692
               helper_pshuflw_xmm },
2693
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2694
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2695
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2696
    [0x74] = MMX_OP2(pcmpeqb),
2697
    [0x75] = MMX_OP2(pcmpeqw),
2698
    [0x76] = MMX_OP2(pcmpeql),
2699
    [0x77] = { SSE_DUMMY }, /* emms */
2700
    [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
2701
    [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
2702
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
2703
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2704
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
2705
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
2706
    [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
2707
    [0xd1] = MMX_OP2(psrlw),
2708
    [0xd2] = MMX_OP2(psrld),
2709
    [0xd3] = MMX_OP2(psrlq),
2710
    [0xd4] = MMX_OP2(paddq),
2711
    [0xd5] = MMX_OP2(pmullw),
2712
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
2713
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
2714
    [0xd8] = MMX_OP2(psubusb),
2715
    [0xd9] = MMX_OP2(psubusw),
2716
    [0xda] = MMX_OP2(pminub),
2717
    [0xdb] = MMX_OP2(pand),
2718
    [0xdc] = MMX_OP2(paddusb),
2719
    [0xdd] = MMX_OP2(paddusw),
2720
    [0xde] = MMX_OP2(pmaxub),
2721
    [0xdf] = MMX_OP2(pandn),
2722
    [0xe0] = MMX_OP2(pavgb),
2723
    [0xe1] = MMX_OP2(psraw),
2724
    [0xe2] = MMX_OP2(psrad),
2725
    [0xe3] = MMX_OP2(pavgw),
2726
    [0xe4] = MMX_OP2(pmulhuw),
2727
    [0xe5] = MMX_OP2(pmulhw),
2728
    [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
2729
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntq */
2730
    [0xe8] = MMX_OP2(psubsb),
2731
    [0xe9] = MMX_OP2(psubsw),
2732
    [0xea] = MMX_OP2(pminsw),
2733
    [0xeb] = MMX_OP2(por),
2734
    [0xec] = MMX_OP2(paddsb),
2735
    [0xed] = MMX_OP2(paddsw),
2736
    [0xee] = MMX_OP2(pmaxsw),
2737
    [0xef] = MMX_OP2(pxor),
2738
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
2739
    [0xf1] = MMX_OP2(psllw),
2740
    [0xf2] = MMX_OP2(pslld),
2741
    [0xf3] = MMX_OP2(psllq),
2742
    [0xf4] = MMX_OP2(pmuludq),
2743
    [0xf5] = MMX_OP2(pmaddwd),
2744
    [0xf6] = MMX_OP2(psadbw),
2745
    [0xf7] = MMX_OP2(maskmov),
2746
    [0xf8] = MMX_OP2(psubb),
2747
    [0xf9] = MMX_OP2(psubw),
2748
    [0xfa] = MMX_OP2(psubl),
2749
    [0xfb] = MMX_OP2(psubq),
2750
    [0xfc] = MMX_OP2(paddb),
2751
    [0xfd] = MMX_OP2(paddw),
2752
    [0xfe] = MMX_OP2(paddl),
2753
};
2754

    
2755
static void *sse_op_table2[3 * 8][2] = {
2756
    [0 + 2] = MMX_OP2(psrlw),
2757
    [0 + 4] = MMX_OP2(psraw),
2758
    [0 + 6] = MMX_OP2(psllw),
2759
    [8 + 2] = MMX_OP2(psrld),
2760
    [8 + 4] = MMX_OP2(psrad),
2761
    [8 + 6] = MMX_OP2(pslld),
2762
    [16 + 2] = MMX_OP2(psrlq),
2763
    [16 + 3] = { NULL, helper_psrldq_xmm },
2764
    [16 + 6] = MMX_OP2(psllq),
2765
    [16 + 7] = { NULL, helper_pslldq_xmm },
2766
};
2767

    
2768
static void *sse_op_table3[4 * 3] = {
2769
    helper_cvtsi2ss,
2770
    helper_cvtsi2sd,
2771
    X86_64_ONLY(helper_cvtsq2ss),
2772
    X86_64_ONLY(helper_cvtsq2sd),
2773

    
2774
    helper_cvttss2si,
2775
    helper_cvttsd2si,
2776
    X86_64_ONLY(helper_cvttss2sq),
2777
    X86_64_ONLY(helper_cvttsd2sq),
2778

    
2779
    helper_cvtss2si,
2780
    helper_cvtsd2si,
2781
    X86_64_ONLY(helper_cvtss2sq),
2782
    X86_64_ONLY(helper_cvtsd2sq),
2783
};
2784

    
2785
static void *sse_op_table4[8][4] = {
2786
    SSE_FOP(cmpeq),
2787
    SSE_FOP(cmplt),
2788
    SSE_FOP(cmple),
2789
    SSE_FOP(cmpunord),
2790
    SSE_FOP(cmpneq),
2791
    SSE_FOP(cmpnlt),
2792
    SSE_FOP(cmpnle),
2793
    SSE_FOP(cmpord),
2794
};
2795

    
2796
/* 3DNow! operations, indexed by the trailing opcode byte. */
static void *sse_op_table5[256] = {
    [0x0c] = helper_pi2fw,
    [0x0d] = helper_pi2fd,
    [0x1c] = helper_pf2iw,
    [0x1d] = helper_pf2id,
    [0x8a] = helper_pfnacc,
    [0x8e] = helper_pfpnacc,
    [0x90] = helper_pfcmpge,
    [0x94] = helper_pfmin,
    [0x96] = helper_pfrcp,
    [0x97] = helper_pfrsqrt,
    [0x9a] = helper_pfsub,
    [0x9e] = helper_pfadd,
    [0xa0] = helper_pfcmpgt,
    [0xa4] = helper_pfmax,
    [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = helper_movq, /* pfrsqit1 */
    [0xaa] = helper_pfsubr,
    [0xae] = helper_pfacc,
    [0xb0] = helper_pfcmpeq,
    [0xb4] = helper_pfmul,
    [0xb6] = helper_movq, /* pfrcpit2 */
    [0xb7] = helper_pmulhrw_mmx,
    [0xbb] = helper_pswapd,
    [0xbf] = helper_pavgb_mmx /* pavgusb */
};
2822

    
2823
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2824
{
2825
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
2826
    int modrm, mod, rm, reg, reg_addr, offset_addr;
2827
    void *sse_op2;
2828

    
2829
    b &= 0xff;
2830
    if (s->prefix & PREFIX_DATA)
2831
        b1 = 1;
2832
    else if (s->prefix & PREFIX_REPZ)
2833
        b1 = 2;
2834
    else if (s->prefix & PREFIX_REPNZ)
2835
        b1 = 3;
2836
    else
2837
        b1 = 0;
2838
    sse_op2 = sse_op_table1[b][b1];
2839
    if (!sse_op2)
2840
        goto illegal_op;
2841
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
2842
        is_xmm = 1;
2843
    } else {
2844
        if (b1 == 0) {
2845
            /* MMX case */
2846
            is_xmm = 0;
2847
        } else {
2848
            is_xmm = 1;
2849
        }
2850
    }
2851
    /* simple MMX/SSE operation */
2852
    if (s->flags & HF_TS_MASK) {
2853
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2854
        return;
2855
    }
2856
    if (s->flags & HF_EM_MASK) {
2857
    illegal_op:
2858
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2859
        return;
2860
    }
2861
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2862
        goto illegal_op;
2863
    if (b == 0x0e) {
2864
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
2865
            goto illegal_op;
2866
        /* femms */
2867
        tcg_gen_helper_0_0(helper_emms);
2868
        return;
2869
    }
2870
    if (b == 0x77) {
2871
        /* emms */
2872
        tcg_gen_helper_0_0(helper_emms);
2873
        return;
2874
    }
2875
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2876
       the static cpu state) */
2877
    if (!is_xmm) {
2878
        tcg_gen_helper_0_0(helper_enter_mmx);
2879
    }
2880

    
2881
    modrm = ldub_code(s->pc++);
2882
    reg = ((modrm >> 3) & 7);
2883
    if (is_xmm)
2884
        reg |= rex_r;
2885
    mod = (modrm >> 6) & 3;
2886
    if (sse_op2 == SSE_SPECIAL) {
2887
        b |= (b1 << 8);
2888
        switch(b) {
2889
        case 0x0e7: /* movntq */
2890
            if (mod == 3)
2891
                goto illegal_op;
2892
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2893
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2894
            break;
2895
        case 0x1e7: /* movntdq */
2896
        case 0x02b: /* movntps */
2897
        case 0x12b: /* movntps */
2898
        case 0x3f0: /* lddqu */
2899
            if (mod == 3)
2900
                goto illegal_op;
2901
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2902
            gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2903
            break;
2904
        case 0x6e: /* movd mm, ea */
2905
#ifdef TARGET_X86_64
2906
            if (s->dflag == 2) {
2907
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2908
                tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
2909
            } else
2910
#endif
2911
            {
2912
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2913
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
2914
                                 offsetof(CPUX86State,fpregs[reg].mmx));
2915
                tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
2916
            }
2917
            break;
2918
        case 0x16e: /* movd xmm, ea */
2919
#ifdef TARGET_X86_64
2920
            if (s->dflag == 2) {
2921
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
2922
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
2923
                                 offsetof(CPUX86State,xmm_regs[reg]));
2924
                tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
2925
            } else
2926
#endif
2927
            {
2928
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2929
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
2930
                                 offsetof(CPUX86State,xmm_regs[reg]));
2931
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2932
                tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
2933
            }
2934
            break;
2935
        case 0x6f: /* movq mm, ea */
2936
            if (mod != 3) {
2937
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2938
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
2939
            } else {
2940
                rm = (modrm & 7);
2941
                tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
2942
                               offsetof(CPUX86State,fpregs[rm].mmx));
2943
                tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
2944
                               offsetof(CPUX86State,fpregs[reg].mmx));
2945
            }
2946
            break;
2947
        case 0x010: /* movups */
2948
        case 0x110: /* movupd */
2949
        case 0x028: /* movaps */
2950
        case 0x128: /* movapd */
2951
        case 0x16f: /* movdqa xmm, ea */
2952
        case 0x26f: /* movdqu xmm, ea */
2953
            if (mod != 3) {
2954
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2955
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
2956
            } else {
2957
                rm = (modrm & 7) | REX_B(s);
2958
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2959
                            offsetof(CPUX86State,xmm_regs[rm]));
2960
            }
2961
            break;
2962
        case 0x210: /* movss xmm, ea */
2963
            if (mod != 3) {
2964
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2965
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
2966
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2967
                gen_op_movl_T0_0();
2968
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2969
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2970
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2971
            } else {
2972
                rm = (modrm & 7) | REX_B(s);
2973
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2974
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2975
            }
2976
            break;
2977
        case 0x310: /* movsd xmm, ea */
2978
            if (mod != 3) {
2979
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2980
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2981
                gen_op_movl_T0_0();
2982
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2983
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2984
            } else {
2985
                rm = (modrm & 7) | REX_B(s);
2986
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2987
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2988
            }
2989
            break;
2990
        case 0x012: /* movlps */
2991
        case 0x112: /* movlpd */
2992
            if (mod != 3) {
2993
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2994
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2995
            } else {
2996
                /* movhlps */
2997
                rm = (modrm & 7) | REX_B(s);
2998
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2999
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3000
            }
3001
            break;
3002
        case 0x212: /* movsldup */
3003
            if (mod != 3) {
3004
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3005
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3006
            } else {
3007
                rm = (modrm & 7) | REX_B(s);
3008
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3009
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3010
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3011
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3012
            }
3013
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3014
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3015
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3016
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3017
            break;
3018
        case 0x312: /* movddup */
3019
            if (mod != 3) {
3020
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3021
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3022
            } else {
3023
                rm = (modrm & 7) | REX_B(s);
3024
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3025
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3026
            }
3027
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3028
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3029
            break;
3030
        case 0x016: /* movhps */
3031
        case 0x116: /* movhpd */
3032
            if (mod != 3) {
3033
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3034
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3035
            } else {
3036
                /* movlhps */
3037
                rm = (modrm & 7) | REX_B(s);
3038
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3039
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3040
            }
3041
            break;
3042
        case 0x216: /* movshdup */
3043
            if (mod != 3) {
3044
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3045
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3046
            } else {
3047
                rm = (modrm & 7) | REX_B(s);
3048
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3049
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3050
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3051
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3052
            }
3053
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3054
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3055
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3056
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3057
            break;
3058
        case 0x7e: /* movd ea, mm */
3059
#ifdef TARGET_X86_64
3060
            if (s->dflag == 2) {
3061
                tcg_gen_ld_i64(cpu_T[0], cpu_env, 
3062
                               offsetof(CPUX86State,fpregs[reg].mmx));
3063
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3064
            } else
3065
#endif
3066
            {
3067
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, 
3068
                                 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3069
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3070
            }
3071
            break;
3072
        case 0x17e: /* movd ea, xmm */
3073
#ifdef TARGET_X86_64
3074
            if (s->dflag == 2) {
3075
                tcg_gen_ld_i64(cpu_T[0], cpu_env, 
3076
                               offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3077
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3078
            } else
3079
#endif
3080
            {
3081
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, 
3082
                                 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3083
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3084
            }
3085
            break;
3086
        case 0x27e: /* movq xmm, ea */
3087
            if (mod != 3) {
3088
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3089
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3090
            } else {
3091
                rm = (modrm & 7) | REX_B(s);
3092
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3093
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3094
            }
3095
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3096
            break;
3097
        case 0x7f: /* movq ea, mm */
3098
            if (mod != 3) {
3099
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3100
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3101
            } else {
3102
                rm = (modrm & 7);
3103
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3104
                            offsetof(CPUX86State,fpregs[reg].mmx));
3105
            }
3106
            break;
3107
        case 0x011: /* movups */
3108
        case 0x111: /* movupd */
3109
        case 0x029: /* movaps */
3110
        case 0x129: /* movapd */
3111
        case 0x17f: /* movdqa ea, xmm */
3112
        case 0x27f: /* movdqu ea, xmm */
3113
            if (mod != 3) {
3114
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3115
                gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3116
            } else {
3117
                rm = (modrm & 7) | REX_B(s);
3118
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3119
                            offsetof(CPUX86State,xmm_regs[reg]));
3120
            }
3121
            break;
3122
        case 0x211: /* movss ea, xmm */
3123
            if (mod != 3) {
3124
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3125
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3126
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
3127
            } else {
3128
                rm = (modrm & 7) | REX_B(s);
3129
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3130
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3131
            }
3132
            break;
3133
        case 0x311: /* movsd ea, xmm */
3134
            if (mod != 3) {
3135
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3136
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3137
            } else {
3138
                rm = (modrm & 7) | REX_B(s);
3139
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3140
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3141
            }
3142
            break;
3143
        case 0x013: /* movlps */
3144
        case 0x113: /* movlpd */
3145
            if (mod != 3) {
3146
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3147
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3148
            } else {
3149
                goto illegal_op;
3150
            }
3151
            break;
3152
        case 0x017: /* movhps */
3153
        case 0x117: /* movhpd */
3154
            if (mod != 3) {
3155
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3156
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3157
            } else {
3158
                goto illegal_op;
3159
            }
3160
            break;
3161
        case 0x71: /* shift mm, im */
3162
        case 0x72:
3163
        case 0x73:
3164
        case 0x171: /* shift xmm, im */
3165
        case 0x172:
3166
        case 0x173:
3167
            val = ldub_code(s->pc++);
3168
            if (is_xmm) {
3169
                gen_op_movl_T0_im(val);
3170
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3171
                gen_op_movl_T0_0();
3172
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3173
                op1_offset = offsetof(CPUX86State,xmm_t0);
3174
            } else {
3175
                gen_op_movl_T0_im(val);
3176
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3177
                gen_op_movl_T0_0();
3178
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3179
                op1_offset = offsetof(CPUX86State,mmx_t0);
3180
            }
3181
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
3182
            if (!sse_op2)
3183
                goto illegal_op;
3184
            if (is_xmm) {
3185
                rm = (modrm & 7) | REX_B(s);
3186
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3187
            } else {
3188
                rm = (modrm & 7);
3189
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3190
            }
3191
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3192
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3193
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3194
            break;
3195
        case 0x050: /* movmskps */
3196
            rm = (modrm & 7) | REX_B(s);
3197
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
3198
                             offsetof(CPUX86State,xmm_regs[rm]));
3199
            tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3200
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3201
            gen_op_mov_reg_T0(OT_LONG, reg);
3202
            break;
3203
        case 0x150: /* movmskpd */
3204
            rm = (modrm & 7) | REX_B(s);
3205
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 
3206
                             offsetof(CPUX86State,xmm_regs[rm]));
3207
            tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3208
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3209
            gen_op_mov_reg_T0(OT_LONG, reg);
3210
            break;
3211
        case 0x02a: /* cvtpi2ps */
3212
        case 0x12a: /* cvtpi2pd */
3213
            tcg_gen_helper_0_0(helper_enter_mmx);
3214
            if (mod != 3) {
3215
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3216
                op2_offset = offsetof(CPUX86State,mmx_t0);
3217
                gen_ldq_env_A0(s->mem_index, op2_offset);
3218
            } else {
3219
                rm = (modrm & 7);
3220
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3221
            }
3222
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3223
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3224
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3225
            switch(b >> 8) {
3226
            case 0x0:
3227
                tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3228
                break;
3229
            default:
3230
            case 0x1:
3231
                tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3232
                break;
3233
            }
3234
            break;
3235
        case 0x22a: /* cvtsi2ss */
3236
        case 0x32a: /* cvtsi2sd */
3237
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3238
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3239
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3240
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3241
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3242
            tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3243
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
3244
            break;
3245
        case 0x02c: /* cvttps2pi */
3246
        case 0x12c: /* cvttpd2pi */
3247
        case 0x02d: /* cvtps2pi */
3248
        case 0x12d: /* cvtpd2pi */
3249
            tcg_gen_helper_0_0(helper_enter_mmx);
3250
            if (mod != 3) {
3251
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3252
                op2_offset = offsetof(CPUX86State,xmm_t0);
3253
                gen_ldo_env_A0(s->mem_index, op2_offset);
3254
            } else {
3255
                rm = (modrm & 7) | REX_B(s);
3256
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3257
            }
3258
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3259
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3260
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3261
            switch(b) {
3262
            case 0x02c:
3263
                tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3264
                break;
3265
            case 0x12c:
3266
                tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3267
                break;
3268
            case 0x02d:
3269
                tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3270
                break;
3271
            case 0x12d:
3272
                tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3273
                break;
3274
            }
3275
            break;
3276
        case 0x22c: /* cvttss2si */
3277
        case 0x32c: /* cvttsd2si */
3278
        case 0x22d: /* cvtss2si */
3279
        case 0x32d: /* cvtsd2si */
3280
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3281
            if (mod != 3) {
3282
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3283
                if ((b >> 8) & 1) {
3284
                    gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3285
                } else {
3286
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3287
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3288
                }
3289
                op2_offset = offsetof(CPUX86State,xmm_t0);
3290
            } else {
3291
                rm = (modrm & 7) | REX_B(s);
3292
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3293
            }
3294
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3295
                                    (b & 1) * 4];
3296
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3297
            if (ot == OT_LONG) {
3298
                tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
3299
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3300
            } else {
3301
                tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
3302
            }
3303
            gen_op_mov_reg_T0(ot, reg);
3304
            break;
3305
        case 0xc4: /* pinsrw */
3306
        case 0x1c4:
3307
            s->rip_offset = 1;
3308
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3309
            val = ldub_code(s->pc++);
3310
            if (b1) {
3311
                val &= 7;
3312
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
3313
                                offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3314
            } else {
3315
                val &= 3;
3316
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
3317
                                offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3318
            }
3319
            break;
3320
        case 0xc5: /* pextrw */
3321
        case 0x1c5:
3322
            if (mod != 3)
3323
                goto illegal_op;
3324
            val = ldub_code(s->pc++);
3325
            if (b1) {
3326
                val &= 7;
3327
                rm = (modrm & 7) | REX_B(s);
3328
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3329
                                 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3330
            } else {
3331
                val &= 3;
3332
                rm = (modrm & 7);
3333
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3334
                                offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3335
            }
3336
            reg = ((modrm >> 3) & 7) | rex_r;
3337
            gen_op_mov_reg_T0(OT_LONG, reg);
3338
            break;
3339
        case 0x1d6: /* movq ea, xmm */
3340
            if (mod != 3) {
3341
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3342
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3343
            } else {
3344
                rm = (modrm & 7) | REX_B(s);
3345
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3346
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3347
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3348
            }
3349
            break;
3350
        case 0x2d6: /* movq2dq */
3351
            tcg_gen_helper_0_0(helper_enter_mmx);
3352
            rm = (modrm & 7);
3353
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3354
                        offsetof(CPUX86State,fpregs[rm].mmx));
3355
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3356
            break;
3357
        case 0x3d6: /* movdq2q */
3358
            tcg_gen_helper_0_0(helper_enter_mmx);
3359
            rm = (modrm & 7) | REX_B(s);
3360
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3361
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3362
            break;
3363
        case 0xd7: /* pmovmskb */
3364
        case 0x1d7:
3365
            if (mod != 3)
3366
                goto illegal_op;
3367
            if (b1) {
3368
                rm = (modrm & 7) | REX_B(s);
3369
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
3370
                tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
3371
            } else {
3372
                rm = (modrm & 7);
3373
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
3374
                tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
3375
            }
3376
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3377
            reg = ((modrm >> 3) & 7) | rex_r;
3378
            gen_op_mov_reg_T0(OT_LONG, reg);
3379
            break;
3380
        default:
3381
            goto illegal_op;
3382
        }
3383
    } else {
3384
        /* generic MMX or SSE operation */
3385
        switch(b) {
3386
        case 0x70: /* pshufx insn */
3387
        case 0xc6: /* pshufx insn */
3388
        case 0xc2: /* compare insns */
3389
            s->rip_offset = 1;
3390
            break;
3391
        default:
3392
            break;
3393
        }
3394
        if (is_xmm) {
3395
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3396
            if (mod != 3) {
3397
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3398
                op2_offset = offsetof(CPUX86State,xmm_t0);
3399
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
3400
                                b == 0xc2)) {
3401
                    /* specific case for SSE single instructions */
3402
                    if (b1 == 2) {
3403
                        /* 32 bit access */
3404
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3405
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3406
                    } else {
3407
                        /* 64 bit access */
3408
                        gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3409
                    }
3410
                } else {
3411
                    gen_ldo_env_A0(s->mem_index, op2_offset);
3412
                }
3413
            } else {
3414
                rm = (modrm & 7) | REX_B(s);
3415
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3416
            }
3417
        } else {
3418
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3419
            if (mod != 3) {
3420
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3421
                op2_offset = offsetof(CPUX86State,mmx_t0);
3422
                gen_ldq_env_A0(s->mem_index, op2_offset);
3423
            } else {
3424
                rm = (modrm & 7);
3425
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3426
            }
3427
        }
3428
        switch(b) {
3429
        case 0x0f: /* 3DNow! data insns */
3430
            if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3431
                goto illegal_op;
3432
            val = ldub_code(s->pc++);
3433
            sse_op2 = sse_op_table5[val];
3434
            if (!sse_op2)
3435
                goto illegal_op;
3436
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3437
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3438
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3439
            break;
3440
        case 0x70: /* pshufx insn */
3441
        case 0xc6: /* pshufx insn */
3442
            val = ldub_code(s->pc++);
3443
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3444
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3445
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
3446
            break;
3447
        case 0xc2:
3448
            /* compare insns */
3449
            val = ldub_code(s->pc++);
3450
            if (val >= 8)
3451
                goto illegal_op;
3452
            sse_op2 = sse_op_table4[val][b1];
3453
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3454
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3455
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3456
            break;
3457
        case 0xf7:
3458
            /* maskmov : we must prepare A0 */
3459
            if (mod != 3)
3460
                goto illegal_op;
3461
#ifdef TARGET_X86_64
3462
            if (s->aflag == 2) {
3463
                gen_op_movq_A0_reg(R_EDI);
3464
            } else
3465
#endif
3466
            {
3467
                gen_op_movl_A0_reg(R_EDI);
3468
                if (s->aflag == 0)
3469
                    gen_op_andl_A0_ffff();
3470
            }
3471
            gen_add_A0_ds_seg(s);
3472

    
3473
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3474
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3475
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
3476
            break;
3477
        default:
3478
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3479
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3480
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3481
            break;
3482
        }
3483
        if (b == 0x2e || b == 0x2f) {
3484
            /* just to keep the EFLAGS optimization correct */
3485
            gen_op_com_dummy();
3486
            s->cc_op = CC_OP_EFLAGS;
3487
        }
3488
    }
3489
}
3490

    
3491
/* convert one instruction. s->is_jmp is set if the translation must
3492
   be stopped. Return the next pc value */
3493
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3494
{
3495
    int b, prefixes, aflag, dflag;
3496
    int shift, ot;
3497
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3498
    target_ulong next_eip, tval;
3499
    int rex_w, rex_r;
3500

    
3501
    s->pc = pc_start;
3502
    prefixes = 0;
3503
    aflag = s->code32;
3504
    dflag = s->code32;
3505
    s->override = -1;
3506
    rex_w = -1;
3507
    rex_r = 0;
3508
#ifdef TARGET_X86_64
3509
    s->rex_x = 0;
3510
    s->rex_b = 0;
3511
    x86_64_hregs = 0;
3512
#endif
3513
    s->rip_offset = 0; /* for relative ip address */
3514
 next_byte:
3515
    b = ldub_code(s->pc);
3516
    s->pc++;
3517
    /* check prefixes */
3518
#ifdef TARGET_X86_64
3519
    if (CODE64(s)) {
3520
        switch (b) {
3521
        case 0xf3:
3522
            prefixes |= PREFIX_REPZ;
3523
            goto next_byte;
3524
        case 0xf2:
3525
            prefixes |= PREFIX_REPNZ;
3526
            goto next_byte;
3527
        case 0xf0:
3528
            prefixes |= PREFIX_LOCK;
3529
            goto next_byte;
3530
        case 0x2e:
3531
            s->override = R_CS;
3532
            goto next_byte;
3533
        case 0x36:
3534
            s->override = R_SS;
3535
            goto next_byte;
3536
        case 0x3e:
3537
            s->override = R_DS;
3538
            goto next_byte;
3539
        case 0x26:
3540
            s->override = R_ES;
3541
            goto next_byte;
3542
        case 0x64:
3543
            s->override = R_FS;
3544
            goto next_byte;
3545
        case 0x65:
3546
            s->override = R_GS;
3547
            goto next_byte;
3548
        case 0x66:
3549
            prefixes |= PREFIX_DATA;
3550
            goto next_byte;
3551
        case 0x67:
3552
            prefixes |= PREFIX_ADR;
3553
            goto next_byte;
3554
        case 0x40 ... 0x4f:
3555
            /* REX prefix */
3556
            rex_w = (b >> 3) & 1;
3557
            rex_r = (b & 0x4) << 1;
3558
            s->rex_x = (b & 0x2) << 2;
3559
            REX_B(s) = (b & 0x1) << 3;
3560
            x86_64_hregs = 1; /* select uniform byte register addressing */
3561
            goto next_byte;
3562
        }
3563
        if (rex_w == 1) {
3564
            /* 0x66 is ignored if rex.w is set */
3565
            dflag = 2;
3566
        } else {
3567
            if (prefixes & PREFIX_DATA)
3568
                dflag ^= 1;
3569
        }
3570
        if (!(prefixes & PREFIX_ADR))
3571
            aflag = 2;
3572
    } else
3573
#endif
3574
    {
3575
        switch (b) {
3576
        case 0xf3:
3577
            prefixes |= PREFIX_REPZ;
3578
            goto next_byte;
3579
        case 0xf2:
3580
            prefixes |= PREFIX_REPNZ;
3581
            goto next_byte;
3582
        case 0xf0:
3583
            prefixes |= PREFIX_LOCK;
3584
            goto next_byte;
3585
        case 0x2e:
3586
            s->override = R_CS;
3587
            goto next_byte;
3588
        case 0x36:
3589
            s->override = R_SS;
3590
            goto next_byte;
3591
        case 0x3e:
3592
            s->override = R_DS;
3593
            goto next_byte;
3594
        case 0x26:
3595
            s->override = R_ES;
3596
            goto next_byte;
3597
        case 0x64:
3598
            s->override = R_FS;
3599
            goto next_byte;
3600
        case 0x65:
3601
            s->override = R_GS;
3602
            goto next_byte;
3603
        case 0x66:
3604
            prefixes |= PREFIX_DATA;
3605
            goto next_byte;
3606
        case 0x67:
3607
            prefixes |= PREFIX_ADR;
3608
            goto next_byte;
3609
        }
3610
        if (prefixes & PREFIX_DATA)
3611
            dflag ^= 1;
3612
        if (prefixes & PREFIX_ADR)
3613
            aflag ^= 1;
3614
    }
3615

    
3616
    s->prefix = prefixes;
3617
    s->aflag = aflag;
3618
    s->dflag = dflag;
3619

    
3620
    /* lock generation */
3621
    if (prefixes & PREFIX_LOCK)
3622
        tcg_gen_helper_0_0(helper_lock);
3623

    
3624
    /* now check op code */
3625
 reswitch:
3626
    switch(b) {
3627
    case 0x0f:
3628
        /**************************/
3629
        /* extended op code */
3630
        b = ldub_code(s->pc++) | 0x100;
3631
        goto reswitch;
3632

    
3633
        /**************************/
3634
        /* arith & logic */
3635
    case 0x00 ... 0x05:
3636
    case 0x08 ... 0x0d:
3637
    case 0x10 ... 0x15:
3638
    case 0x18 ... 0x1d:
3639
    case 0x20 ... 0x25:
3640
    case 0x28 ... 0x2d:
3641
    case 0x30 ... 0x35:
3642
    case 0x38 ... 0x3d:
3643
        {
3644
            int op, f, val;
3645
            op = (b >> 3) & 7;
3646
            f = (b >> 1) & 3;
3647

    
3648
            if ((b & 1) == 0)
3649
                ot = OT_BYTE;
3650
            else
3651
                ot = dflag + OT_WORD;
3652

    
3653
            switch(f) {
3654
            case 0: /* OP Ev, Gv */
3655
                modrm = ldub_code(s->pc++);
3656
                reg = ((modrm >> 3) & 7) | rex_r;
3657
                mod = (modrm >> 6) & 3;
3658
                rm = (modrm & 7) | REX_B(s);
3659
                if (mod != 3) {
3660
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3661
                    opreg = OR_TMP0;
3662
                } else if (op == OP_XORL && rm == reg) {
3663
                xor_zero:
3664
                    /* xor reg, reg optimisation */
3665
                    gen_op_movl_T0_0();
3666
                    s->cc_op = CC_OP_LOGICB + ot;
3667
                    gen_op_mov_reg_T0(ot, reg);
3668
                    gen_op_update1_cc();
3669
                    break;
3670
                } else {
3671
                    opreg = rm;
3672
                }
3673
                gen_op_mov_TN_reg(ot, 1, reg);
3674
                gen_op(s, op, ot, opreg);
3675
                break;
3676
            case 1: /* OP Gv, Ev */
3677
                modrm = ldub_code(s->pc++);
3678
                mod = (modrm >> 6) & 3;
3679
                reg = ((modrm >> 3) & 7) | rex_r;
3680
                rm = (modrm & 7) | REX_B(s);
3681
                if (mod != 3) {
3682
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3683
                    gen_op_ld_T1_A0(ot + s->mem_index);
3684
                } else if (op == OP_XORL && rm == reg) {
3685
                    goto xor_zero;
3686
                } else {
3687
                    gen_op_mov_TN_reg(ot, 1, rm);
3688
                }
3689
                gen_op(s, op, ot, reg);
3690
                break;
3691
            case 2: /* OP A, Iv */
3692
                val = insn_get(s, ot);
3693
                gen_op_movl_T1_im(val);
3694
                gen_op(s, op, ot, OR_EAX);
3695
                break;
3696
            }
3697
        }
3698
        break;
3699

    
3700
    case 0x80: /* GRP1 */
3701
    case 0x81:
3702
    case 0x82:
3703
    case 0x83:
3704
        {
3705
            int val;
3706

    
3707
            if ((b & 1) == 0)
3708
                ot = OT_BYTE;
3709
            else
3710
                ot = dflag + OT_WORD;
3711

    
3712
            modrm = ldub_code(s->pc++);
3713
            mod = (modrm >> 6) & 3;
3714
            rm = (modrm & 7) | REX_B(s);
3715
            op = (modrm >> 3) & 7;
3716

    
3717
            if (mod != 3) {
3718
                if (b == 0x83)
3719
                    s->rip_offset = 1;
3720
                else
3721
                    s->rip_offset = insn_const_size(ot);
3722
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3723
                opreg = OR_TMP0;
3724
            } else {
3725
                opreg = rm;
3726
            }
3727

    
3728
            switch(b) {
3729
            default:
3730
            case 0x80:
3731
            case 0x81:
3732
            case 0x82:
3733
                val = insn_get(s, ot);
3734
                break;
3735
            case 0x83:
3736
                val = (int8_t)insn_get(s, OT_BYTE);
3737
                break;
3738
            }
3739
            gen_op_movl_T1_im(val);
3740
            gen_op(s, op, ot, opreg);
3741
        }
3742
        break;
3743

    
3744
        /**************************/
3745
        /* inc, dec, and other misc arith */
3746
    case 0x40 ... 0x47: /* inc Gv */
3747
        ot = dflag ? OT_LONG : OT_WORD;
3748
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
3749
        break;
3750
    case 0x48 ... 0x4f: /* dec Gv */
3751
        ot = dflag ? OT_LONG : OT_WORD;
3752
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
3753
        break;
3754
    case 0xf6: /* GRP3 */
3755
    case 0xf7:
3756
        if ((b & 1) == 0)
3757
            ot = OT_BYTE;
3758
        else
3759
            ot = dflag + OT_WORD;
3760

    
3761
        modrm = ldub_code(s->pc++);
3762
        mod = (modrm >> 6) & 3;
3763
        rm = (modrm & 7) | REX_B(s);
3764
        op = (modrm >> 3) & 7;
3765
        if (mod != 3) {
3766
            if (op == 0)
3767
                s->rip_offset = insn_const_size(ot);
3768
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3769
            gen_op_ld_T0_A0(ot + s->mem_index);
3770
        } else {
3771
            gen_op_mov_TN_reg(ot, 0, rm);
3772
        }
3773

    
3774
        switch(op) {
3775
        case 0: /* test */
3776
            val = insn_get(s, ot);
3777
            gen_op_movl_T1_im(val);
3778
            gen_op_testl_T0_T1_cc();
3779
            s->cc_op = CC_OP_LOGICB + ot;
3780
            break;
3781
        case 2: /* not */
3782
            tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
3783
            if (mod != 3) {
3784
                gen_op_st_T0_A0(ot + s->mem_index);
3785
            } else {
3786
                gen_op_mov_reg_T0(ot, rm);
3787
            }
3788
            break;
3789
        case 3: /* neg */
3790
            tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
3791
            if (mod != 3) {
3792
                gen_op_st_T0_A0(ot + s->mem_index);
3793
            } else {
3794
                gen_op_mov_reg_T0(ot, rm);
3795
            }
3796
            gen_op_update_neg_cc();
3797
            s->cc_op = CC_OP_SUBB + ot;
3798
            break;
3799
        case 4: /* mul */
3800
            switch(ot) {
3801
            case OT_BYTE:
3802
                gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
3803
                tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
3804
                tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
3805
                /* XXX: use 32 bit mul which could be faster */
3806
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3807
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
3808
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3809
                tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
3810
                s->cc_op = CC_OP_MULB;
3811
                break;
3812
            case OT_WORD:
3813
                gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
3814
                tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
3815
                tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
3816
                /* XXX: use 32 bit mul which could be faster */
3817
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3818
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
3819
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3820
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
3821
                gen_op_mov_reg_T0(OT_WORD, R_EDX);
3822
                tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3823
                s->cc_op = CC_OP_MULW;
3824
                break;
3825
            default:
3826
            case OT_LONG:
3827
#ifdef TARGET_X86_64
3828
                gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3829
                tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
3830
                tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
3831
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3832
                gen_op_mov_reg_T0(OT_LONG, R_EAX);
3833
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3834
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
3835
                gen_op_mov_reg_T0(OT_LONG, R_EDX);
3836
                tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3837
#else
3838
                {
3839
                    TCGv t0, t1;
3840
                    t0 = tcg_temp_new(TCG_TYPE_I64);
3841
                    t1 = tcg_temp_new(TCG_TYPE_I64);
3842
                    gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3843
                    tcg_gen_extu_i32_i64(t0, cpu_T[0]);
3844
                    tcg_gen_extu_i32_i64(t1, cpu_T[1]);
3845
                    tcg_gen_mul_i64(t0, t0, t1);
3846
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3847
                    gen_op_mov_reg_T0(OT_LONG, R_EAX);
3848
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3849
                    tcg_gen_shri_i64(t0, t0, 32);
3850
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3851
                    gen_op_mov_reg_T0(OT_LONG, R_EDX);
3852
                    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
3853
                }
3854
#endif
3855
                s->cc_op = CC_OP_MULL;
3856
                break;
3857
#ifdef TARGET_X86_64
3858
            case OT_QUAD:
3859
                tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
3860
                s->cc_op = CC_OP_MULQ;
3861
                break;
3862
#endif
3863
            }
3864
            break;
3865
        case 5: /* imul */
3866
            switch(ot) {
3867
            case OT_BYTE:
3868
                gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
3869
                tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
3870
                tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
3871
                /* XXX: use 32 bit mul which could be faster */
3872
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3873
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
3874
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3875
                tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
3876
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3877
                s->cc_op = CC_OP_MULB;
3878
                break;
3879
            case OT_WORD:
3880
                gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
3881
                tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
3882
                tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
3883
                /* XXX: use 32 bit mul which could be faster */
3884
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3885
                gen_op_mov_reg_T0(OT_WORD, R_EAX);
3886
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3887
                tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
3888
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3889
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
3890
                gen_op_mov_reg_T0(OT_WORD, R_EDX);
3891
                s->cc_op = CC_OP_MULW;
3892
                break;
3893
            default:
3894
            case OT_LONG:
3895
#ifdef TARGET_X86_64
3896
                gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3897
                tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
3898
                tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
3899
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3900
                gen_op_mov_reg_T0(OT_LONG, R_EAX);
3901
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3902
                tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
3903
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3904
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
3905
                gen_op_mov_reg_T0(OT_LONG, R_EDX);
3906
#else
3907
                {
3908
                    TCGv t0, t1;
3909
                    t0 = tcg_temp_new(TCG_TYPE_I64);
3910
                    t1 = tcg_temp_new(TCG_TYPE_I64);
3911
                    gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
3912
                    tcg_gen_ext_i32_i64(t0, cpu_T[0]);
3913
                    tcg_gen_ext_i32_i64(t1, cpu_T[1]);
3914
                    tcg_gen_mul_i64(t0, t0, t1);
3915
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3916
                    gen_op_mov_reg_T0(OT_LONG, R_EAX);
3917
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
3918
                    tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
3919
                    tcg_gen_shri_i64(t0, t0, 32);
3920
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
3921
                    gen_op_mov_reg_T0(OT_LONG, R_EDX);
3922
                    tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
3923
                }
3924
#endif
3925
                s->cc_op = CC_OP_MULL;
3926
                break;
3927
#ifdef TARGET_X86_64
3928
            case OT_QUAD:
3929
                tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
3930
                s->cc_op = CC_OP_MULQ;
3931
                break;
3932
#endif
3933
            }
3934
            break;
3935
        case 6: /* div */
3936
            switch(ot) {
3937
            case OT_BYTE:
3938
                gen_jmp_im(pc_start - s->cs_base);
3939
                tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
3940
                break;
3941
            case OT_WORD:
3942
                gen_jmp_im(pc_start - s->cs_base);
3943
                tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
3944
                break;
3945
            default:
3946
            case OT_LONG:
3947
                gen_jmp_im(pc_start - s->cs_base);
3948
                tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
3949
                break;
3950
#ifdef TARGET_X86_64
3951
            case OT_QUAD:
3952
                gen_jmp_im(pc_start - s->cs_base);
3953
                tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
3954
                break;
3955
#endif
3956
            }
3957
            break;
3958
        case 7: /* idiv */
3959
            switch(ot) {
3960
            case OT_BYTE:
3961
                gen_jmp_im(pc_start - s->cs_base);
3962
                tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
3963
                break;
3964
            case OT_WORD:
3965
                gen_jmp_im(pc_start - s->cs_base);
3966
                tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
3967
                break;
3968
            default:
3969
            case OT_LONG:
3970
                gen_jmp_im(pc_start - s->cs_base);
3971
                tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
3972
                break;
3973
#ifdef TARGET_X86_64
3974
            case OT_QUAD:
3975
                gen_jmp_im(pc_start - s->cs_base);
3976
                tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
3977
                break;
3978
#endif
3979
            }
3980
            break;
3981
        default:
3982
            goto illegal_op;
3983
        }
3984
        break;
3985

    
3986
    case 0xfe: /* GRP4 */
3987
    case 0xff: /* GRP5 */
3988
        if ((b & 1) == 0)
3989
            ot = OT_BYTE;
3990
        else
3991
            ot = dflag + OT_WORD;
3992

    
3993
        modrm = ldub_code(s->pc++);
3994
        mod = (modrm >> 6) & 3;
3995
        rm = (modrm & 7) | REX_B(s);
3996
        op = (modrm >> 3) & 7;
3997
        if (op >= 2 && b == 0xfe) {
3998
            goto illegal_op;
3999
        }
4000
        if (CODE64(s)) {
4001
            if (op == 2 || op == 4) {
4002
                /* operand size for jumps is 64 bit */
4003
                ot = OT_QUAD;
4004
            } else if (op == 3 || op == 5) {
4005
                /* for call calls, the operand is 16 or 32 bit, even
4006
                   in long mode */
4007
                ot = dflag ? OT_LONG : OT_WORD;
4008
            } else if (op == 6) {
4009
                /* default push size is 64 bit */
4010
                ot = dflag ? OT_QUAD : OT_WORD;
4011
            }
4012
        }
4013
        if (mod != 3) {
4014
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4015
            if (op >= 2 && op != 3 && op != 5)
4016
                gen_op_ld_T0_A0(ot + s->mem_index);
4017
        } else {
4018
            gen_op_mov_TN_reg(ot, 0, rm);
4019
        }
4020

    
4021
        switch(op) {
4022
        case 0: /* inc Ev */
4023
            if (mod != 3)
4024
                opreg = OR_TMP0;
4025
            else
4026
                opreg = rm;
4027
            gen_inc(s, ot, opreg, 1);
4028
            break;
4029
        case 1: /* dec Ev */
4030
            if (mod != 3)
4031
                opreg = OR_TMP0;
4032
            else
4033
                opreg = rm;
4034
            gen_inc(s, ot, opreg, -1);
4035
            break;
4036
        case 2: /* call Ev */
4037
            /* XXX: optimize if memory (no 'and' is necessary) */
4038
            if (s->dflag == 0)
4039
                gen_op_andl_T0_ffff();
4040
            next_eip = s->pc - s->cs_base;
4041
            gen_movtl_T1_im(next_eip);
4042
            gen_push_T1(s);
4043
            gen_op_jmp_T0();
4044
            gen_eob(s);
4045
            break;
4046
        case 3: /* lcall Ev */
4047
            gen_op_ld_T1_A0(ot + s->mem_index);
4048
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4049
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4050
        do_lcall:
4051
            if (s->pe && !s->vm86) {
4052
                if (s->cc_op != CC_OP_DYNAMIC)
4053
                    gen_op_set_cc_op(s->cc_op);
4054
                gen_jmp_im(pc_start - s->cs_base);
4055
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4056
                tcg_gen_helper_0_4(helper_lcall_protected,
4057
                                   cpu_tmp2_i32, cpu_T[1],
4058
                                   tcg_const_i32(dflag), 
4059
                                   tcg_const_i32(s->pc - pc_start));
4060
            } else {
4061
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4062
                tcg_gen_helper_0_4(helper_lcall_real,
4063
                                   cpu_tmp2_i32, cpu_T[1],
4064
                                   tcg_const_i32(dflag), 
4065
                                   tcg_const_i32(s->pc - s->cs_base));
4066
            }
4067
            gen_eob(s);
4068
            break;
4069
        case 4: /* jmp Ev */
4070
            if (s->dflag == 0)
4071
                gen_op_andl_T0_ffff();
4072
            gen_op_jmp_T0();
4073
            gen_eob(s);
4074
            break;
4075
        case 5: /* ljmp Ev */
4076
            gen_op_ld_T1_A0(ot + s->mem_index);
4077
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4078
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4079
        do_ljmp:
4080
            if (s->pe && !s->vm86) {
4081
                if (s->cc_op != CC_OP_DYNAMIC)
4082
                    gen_op_set_cc_op(s->cc_op);
4083
                gen_jmp_im(pc_start - s->cs_base);
4084
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4085
                tcg_gen_helper_0_3(helper_ljmp_protected,
4086
                                   cpu_tmp2_i32,
4087
                                   cpu_T[1],
4088
                                   tcg_const_i32(s->pc - pc_start));
4089
            } else {
4090
                gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
4091
                gen_op_movl_T0_T1();
4092
                gen_op_jmp_T0();
4093
            }
4094
            gen_eob(s);
4095
            break;
4096
        case 6: /* push Ev */
4097
            gen_push_T0(s);
4098
            break;
4099
        default:
4100
            goto illegal_op;
4101
        }
4102
        break;
4103

    
4104
    case 0x84: /* test Ev, Gv */
4105
    case 0x85:
4106
        if ((b & 1) == 0)
4107
            ot = OT_BYTE;
4108
        else
4109
            ot = dflag + OT_WORD;
4110

    
4111
        modrm = ldub_code(s->pc++);
4112
        mod = (modrm >> 6) & 3;
4113
        rm = (modrm & 7) | REX_B(s);
4114
        reg = ((modrm >> 3) & 7) | rex_r;
4115

    
4116
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4117
        gen_op_mov_TN_reg(ot, 1, reg);
4118
        gen_op_testl_T0_T1_cc();
4119
        s->cc_op = CC_OP_LOGICB + ot;
4120
        break;
4121

    
4122
    case 0xa8: /* test eAX, Iv */
4123
    case 0xa9:
4124
        if ((b & 1) == 0)
4125
            ot = OT_BYTE;
4126
        else
4127
            ot = dflag + OT_WORD;
4128
        val = insn_get(s, ot);
4129

    
4130
        gen_op_mov_TN_reg(ot, 0, OR_EAX);
4131
        gen_op_movl_T1_im(val);
4132
        gen_op_testl_T0_T1_cc();
4133
        s->cc_op = CC_OP_LOGICB + ot;
4134
        break;
4135

    
4136
    case 0x98: /* CWDE/CBW */
4137
#ifdef TARGET_X86_64
4138
        if (dflag == 2) {
4139
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4140
            tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4141
            gen_op_mov_reg_T0(OT_QUAD, R_EAX);
4142
        } else
4143
#endif
4144
        if (dflag == 1) {
4145
            gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4146
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4147
            gen_op_mov_reg_T0(OT_LONG, R_EAX);
4148
        } else {
4149
            gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
4150
            tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4151
            gen_op_mov_reg_T0(OT_WORD, R_EAX);
4152
        }
4153
        break;
4154
    case 0x99: /* CDQ/CWD */
4155
#ifdef TARGET_X86_64
4156
        if (dflag == 2) {
4157
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4158
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
4159
            gen_op_mov_reg_T0(OT_QUAD, R_EDX);
4160
        } else
4161
#endif
4162
        if (dflag == 1) {
4163
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4164
            tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4165
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
4166
            gen_op_mov_reg_T0(OT_LONG, R_EDX);
4167
        } else {
4168
            gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
4169
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4170
            tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
4171
            gen_op_mov_reg_T0(OT_WORD, R_EDX);
4172
        }
4173
        break;
4174
    case 0x1af: /* imul Gv, Ev */
4175
    case 0x69: /* imul Gv, Ev, I */
4176
    case 0x6b:
4177
        ot = dflag + OT_WORD;
4178
        modrm = ldub_code(s->pc++);
4179
        reg = ((modrm >> 3) & 7) | rex_r;
4180
        if (b == 0x69)
4181
            s->rip_offset = insn_const_size(ot);
4182
        else if (b == 0x6b)
4183
            s->rip_offset = 1;
4184
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4185
        if (b == 0x69) {
4186
            val = insn_get(s, ot);
4187
            gen_op_movl_T1_im(val);
4188
        } else if (b == 0x6b) {
4189
            val = (int8_t)insn_get(s, OT_BYTE);
4190
            gen_op_movl_T1_im(val);
4191
        } else {
4192
            gen_op_mov_TN_reg(ot, 1, reg);
4193
        }
4194

    
4195
#ifdef TARGET_X86_64
4196
        if (ot == OT_QUAD) {
4197
            tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
4198
        } else
4199
#endif
4200
        if (ot == OT_LONG) {
4201
#ifdef TARGET_X86_64
4202
                tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4203
                tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4204
                tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4205
                tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4206
                tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4207
                tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4208
#else
4209
                {
4210
                    TCGv t0, t1;
4211
                    t0 = tcg_temp_new(TCG_TYPE_I64);
4212
                    t1 = tcg_temp_new(TCG_TYPE_I64);
4213
                    tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4214
                    tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4215
                    tcg_gen_mul_i64(t0, t0, t1);
4216
                    tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4217
                    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4218
                    tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4219
                    tcg_gen_shri_i64(t0, t0, 32);
4220
                    tcg_gen_trunc_i64_i32(cpu_T[1], t0);
4221
                    tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
4222
                }
4223
#endif
4224
        } else {
4225
            tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4226
            tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4227
            /* XXX: use 32 bit mul which could be faster */
4228
            tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4229
            tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4230
            tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4231
            tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4232
        }
4233
        gen_op_mov_reg_T0(ot, reg);
4234
        s->cc_op = CC_OP_MULB + ot;
4235
        break;
4236
    case 0x1c0:
4237
    case 0x1c1: /* xadd Ev, Gv */
4238
        if ((b & 1) == 0)
4239
            ot = OT_BYTE;
4240
        else
4241
            ot = dflag + OT_WORD;
4242
        modrm = ldub_code(s->pc++);
4243
        reg = ((modrm >> 3) & 7) | rex_r;
4244
        mod = (modrm >> 6) & 3;
4245
        if (mod == 3) {
4246
            rm = (modrm & 7) | REX_B(s);
4247
            gen_op_mov_TN_reg(ot, 0, reg);
4248
            gen_op_mov_TN_reg(ot, 1, rm);
4249
            gen_op_addl_T0_T1();
4250
            gen_op_mov_reg_T1(ot, reg);
4251
            gen_op_mov_reg_T0(ot, rm);
4252
        } else {
4253
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4254
            gen_op_mov_TN_reg(ot, 0, reg);
4255
            gen_op_ld_T1_A0(ot + s->mem_index);
4256
            gen_op_addl_T0_T1();
4257
            gen_op_st_T0_A0(ot + s->mem_index);
4258
            gen_op_mov_reg_T1(ot, reg);
4259
        }
4260
        gen_op_update2_cc();
4261
        s->cc_op = CC_OP_ADDB + ot;
4262
        break;
4263
    case 0x1b0:
4264
    case 0x1b1: /* cmpxchg Ev, Gv */
4265
        {
4266
            int label1;
4267

    
4268
            if ((b & 1) == 0)
4269
                ot = OT_BYTE;
4270
            else
4271
                ot = dflag + OT_WORD;
4272
            modrm = ldub_code(s->pc++);
4273
            reg = ((modrm >> 3) & 7) | rex_r;
4274
            mod = (modrm >> 6) & 3;
4275
            gen_op_mov_TN_reg(ot, 1, reg);
4276
            if (mod == 3) {
4277
                rm = (modrm & 7) | REX_B(s);
4278
                gen_op_mov_TN_reg(ot, 0, rm);
4279
            } else {
4280
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4281
                gen_op_ld_T0_A0(ot + s->mem_index);
4282
                rm = 0; /* avoid warning */
4283
            }
4284
            label1 = gen_new_label();
4285
            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_EAX]));
4286
            tcg_gen_sub_tl(cpu_T3, cpu_T3, cpu_T[0]);
4287
            gen_extu(ot, cpu_T3);
4288
            tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);
4289
            tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
4290
            gen_op_mov_reg_T0(ot, R_EAX);
4291
            gen_set_label(label1);
4292
            if (mod == 3) {
4293
                gen_op_mov_reg_T1(ot, rm);
4294
            } else {
4295
                gen_op_st_T1_A0(ot + s->mem_index);
4296
            }
4297
            tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4298
            tcg_gen_mov_tl(cpu_cc_dst, cpu_T3);
4299
            s->cc_op = CC_OP_SUBB + ot;
4300
        }
4301
        break;
4302
    case 0x1c7: /* cmpxchg8b */
4303
        modrm = ldub_code(s->pc++);
4304
        mod = (modrm >> 6) & 3;
4305
        if ((mod == 3) || ((modrm & 0x38) != 0x8))
4306
            goto illegal_op;
4307
        gen_jmp_im(pc_start - s->cs_base);
4308
        if (s->cc_op != CC_OP_DYNAMIC)
4309
            gen_op_set_cc_op(s->cc_op);
4310
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4311
        tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
4312
        s->cc_op = CC_OP_EFLAGS;
4313
        break;
4314

    
4315
        /**************************/
4316
        /* push/pop */
4317
    case 0x50 ... 0x57: /* push */
4318
        gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
4319
        gen_push_T0(s);
4320
        break;
4321
    case 0x58 ... 0x5f: /* pop */
4322
        if (CODE64(s)) {
4323
            ot = dflag ? OT_QUAD : OT_WORD;
4324
        } else {
4325
            ot = dflag + OT_WORD;
4326
        }
4327
        gen_pop_T0(s);
4328
        /* NOTE: order is important for pop %sp */
4329
        gen_pop_update(s);
4330
        gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
4331
        break;
4332
    case 0x60: /* pusha */
4333
        if (CODE64(s))
4334
            goto illegal_op;
4335
        gen_pusha(s);
4336
        break;
4337
    case 0x61: /* popa */
4338
        if (CODE64(s))
4339
            goto illegal_op;
4340
        gen_popa(s);
4341
        break;
4342
    case 0x68: /* push Iv */
4343
    case 0x6a:
4344
        if (CODE64(s)) {
4345
            ot = dflag ? OT_QUAD : OT_WORD;
4346
        } else {
4347
            ot = dflag + OT_WORD;
4348
        }
4349
        if (b == 0x68)
4350
            val = insn_get(s, ot);
4351
        else
4352
            val = (int8_t)insn_get(s, OT_BYTE);
4353
        gen_op_movl_T0_im(val);
4354
        gen_push_T0(s);
4355
        break;
4356
    case 0x8f: /* pop Ev */
4357
        if (CODE64(s)) {
4358
            ot = dflag ? OT_QUAD : OT_WORD;
4359
        } else {
4360
            ot = dflag + OT_WORD;
4361
        }
4362
        modrm = ldub_code(s->pc++);
4363
        mod = (modrm >> 6) & 3;
4364
        gen_pop_T0(s);
4365
        if (mod == 3) {
4366
            /* NOTE: order is important for pop %sp */
4367
            gen_pop_update(s);
4368
            rm = (modrm & 7) | REX_B(s);
4369
            gen_op_mov_reg_T0(ot, rm);
4370
        } else {
4371
            /* NOTE: order is important too for MMU exceptions */
4372
            s->popl_esp_hack = 1 << ot;
4373
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4374
            s->popl_esp_hack = 0;
4375
            gen_pop_update(s);
4376
        }
4377
        break;
4378
    case 0xc8: /* enter */
4379
        {
4380
            int level;
4381
            val = lduw_code(s->pc);
4382
            s->pc += 2;
4383
            level = ldub_code(s->pc++);
4384
            gen_enter(s, val, level);
4385
        }
4386
        break;
4387
    case 0xc9: /* leave */
4388
        /* XXX: exception not precise (ESP is updated before potential exception) */
4389
        if (CODE64(s)) {
4390
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4391
            gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4392
        } else if (s->ss32) {
4393
            gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4394
            gen_op_mov_reg_T0(OT_LONG, R_ESP);
4395
        } else {
4396
            gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4397
            gen_op_mov_reg_T0(OT_WORD, R_ESP);
4398
        }
4399
        gen_pop_T0(s);
4400
        if (CODE64(s)) {
4401
            ot = dflag ? OT_QUAD : OT_WORD;
4402
        } else {
4403
            ot = dflag + OT_WORD;
4404
        }
4405
        gen_op_mov_reg_T0(ot, R_EBP);
4406
        gen_pop_update(s);
4407
        break;
4408
    case 0x06: /* push es */
4409
    case 0x0e: /* push cs */
4410
    case 0x16: /* push ss */
4411
    case 0x1e: /* push ds */
4412
        if (CODE64(s))
4413
            goto illegal_op;
4414
        gen_op_movl_T0_seg(b >> 3);
4415
        gen_push_T0(s);
4416
        break;
4417
    case 0x1a0: /* push fs */
4418
    case 0x1a8: /* push gs */
4419
        gen_op_movl_T0_seg((b >> 3) & 7);
4420
        gen_push_T0(s);
4421
        break;
4422
    case 0x07: /* pop es */
4423
    case 0x17: /* pop ss */
4424
    case 0x1f: /* pop ds */
4425
        if (CODE64(s))
4426
            goto illegal_op;
4427
        reg = b >> 3;
4428
        gen_pop_T0(s);
4429
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4430
        gen_pop_update(s);
4431
        if (reg == R_SS) {
4432
            /* if reg == SS, inhibit interrupts/trace. */
4433
            /* If several instructions disable interrupts, only the
4434
               _first_ does it */
4435
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4436
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
4437
            s->tf = 0;
4438
        }
4439
        if (s->is_jmp) {
4440
            gen_jmp_im(s->pc - s->cs_base);
4441
            gen_eob(s);
4442
        }
4443
        break;
4444
    case 0x1a1: /* pop fs */
4445
    case 0x1a9: /* pop gs */
4446
        gen_pop_T0(s);
4447
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4448
        gen_pop_update(s);
4449
        if (s->is_jmp) {
4450
            gen_jmp_im(s->pc - s->cs_base);
4451
            gen_eob(s);
4452
        }
4453
        break;
4454

    
4455
        /**************************/
4456
        /* mov */
4457
    case 0x88:
4458
    case 0x89: /* mov Gv, Ev */
4459
        if ((b & 1) == 0)
4460
            ot = OT_BYTE;
4461
        else
4462
            ot = dflag + OT_WORD;
4463
        modrm = ldub_code(s->pc++);
4464
        reg = ((modrm >> 3) & 7) | rex_r;
4465

    
4466
        /* generate a generic store */
4467
        gen_ldst_modrm(s, modrm, ot, reg, 1);
4468
        break;
4469
    case 0xc6:
4470
    case 0xc7: /* mov Ev, Iv */
4471
        if ((b & 1) == 0)
4472
            ot = OT_BYTE;
4473
        else
4474
            ot = dflag + OT_WORD;
4475
        modrm = ldub_code(s->pc++);
4476
        mod = (modrm >> 6) & 3;
4477
        if (mod != 3) {
4478
            s->rip_offset = insn_const_size(ot);
4479
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4480
        }
4481
        val = insn_get(s, ot);
4482
        gen_op_movl_T0_im(val);
4483
        if (mod != 3)
4484
            gen_op_st_T0_A0(ot + s->mem_index);
4485
        else
4486
            gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4487
        break;
4488
    case 0x8a:
4489
    case 0x8b: /* mov Ev, Gv */
4490
        if ((b & 1) == 0)
4491
            ot = OT_BYTE;
4492
        else
4493
            ot = OT_WORD + dflag;
4494
        modrm = ldub_code(s->pc++);
4495
        reg = ((modrm >> 3) & 7) | rex_r;
4496

    
4497
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4498
        gen_op_mov_reg_T0(ot, reg);
4499
        break;
4500
    case 0x8e: /* mov seg, Gv */
4501
        modrm = ldub_code(s->pc++);
4502
        reg = (modrm >> 3) & 7;
4503
        if (reg >= 6 || reg == R_CS)
4504
            goto illegal_op;
4505
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4506
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4507
        if (reg == R_SS) {
4508
            /* if reg == SS, inhibit interrupts/trace */
4509
            /* If several instructions disable interrupts, only the
4510
               _first_ does it */
4511
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4512
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
4513
            s->tf = 0;
4514
        }
4515
        if (s->is_jmp) {
4516
            gen_jmp_im(s->pc - s->cs_base);
4517
            gen_eob(s);
4518
        }
4519
        break;
4520
    case 0x8c: /* mov Gv, seg */
4521
        modrm = ldub_code(s->pc++);
4522
        reg = (modrm >> 3) & 7;
4523
        mod = (modrm >> 6) & 3;
4524
        if (reg >= 6)
4525
            goto illegal_op;
4526
        gen_op_movl_T0_seg(reg);
4527
        if (mod == 3)
4528
            ot = OT_WORD + dflag;
4529
        else
4530
            ot = OT_WORD;
4531
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4532
        break;
4533

    
4534
    case 0x1b6: /* movzbS Gv, Eb */
4535
    case 0x1b7: /* movzwS Gv, Eb */
4536
    case 0x1be: /* movsbS Gv, Eb */
4537
    case 0x1bf: /* movswS Gv, Eb */
4538
        {
4539
            int d_ot;
4540
            /* d_ot is the size of destination */
4541
            d_ot = dflag + OT_WORD;
4542
            /* ot is the size of source */
4543
            ot = (b & 1) + OT_BYTE;
4544
            modrm = ldub_code(s->pc++);
4545
            reg = ((modrm >> 3) & 7) | rex_r;
4546
            mod = (modrm >> 6) & 3;
4547
            rm = (modrm & 7) | REX_B(s);
4548

    
4549
            if (mod == 3) {
4550
                gen_op_mov_TN_reg(ot, 0, rm);
4551
                switch(ot | (b & 8)) {
4552
                case OT_BYTE:
4553
                    tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4554
                    break;
4555
                case OT_BYTE | 8:
4556
                    tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4557
                    break;
4558
                case OT_WORD:
4559
                    tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4560
                    break;
4561
                default:
4562
                case OT_WORD | 8:
4563
                    tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4564
                    break;
4565
                }
4566
                gen_op_mov_reg_T0(d_ot, reg);
4567
            } else {
4568
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4569
                if (b & 8) {
4570
                    gen_op_lds_T0_A0(ot + s->mem_index);
4571
                } else {
4572
                    gen_op_ldu_T0_A0(ot + s->mem_index);
4573
                }
4574
                gen_op_mov_reg_T0(d_ot, reg);
4575
            }
4576
        }
4577
        break;
4578

    
4579
    case 0x8d: /* lea */
4580
        ot = dflag + OT_WORD;
4581
        modrm = ldub_code(s->pc++);
4582
        mod = (modrm >> 6) & 3;
4583
        if (mod == 3)
4584
            goto illegal_op;
4585
        reg = ((modrm >> 3) & 7) | rex_r;
4586
        /* we must ensure that no segment is added */
4587
        s->override = -1;
4588
        val = s->addseg;
4589
        s->addseg = 0;
4590
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4591
        s->addseg = val;
4592
        gen_op_mov_reg_A0(ot - OT_WORD, reg);
4593
        break;
4594

    
4595
    case 0xa0: /* mov EAX, Ov */
4596
    case 0xa1:
4597
    case 0xa2: /* mov Ov, EAX */
4598
    case 0xa3:
4599
        {
4600
            target_ulong offset_addr;
4601

    
4602
            if ((b & 1) == 0)
4603
                ot = OT_BYTE;
4604
            else
4605
                ot = dflag + OT_WORD;
4606
#ifdef TARGET_X86_64
4607
            if (s->aflag == 2) {
4608
                offset_addr = ldq_code(s->pc);
4609
                s->pc += 8;
4610
                gen_op_movq_A0_im(offset_addr);
4611
            } else
4612
#endif
4613
            {
4614
                if (s->aflag) {
4615
                    offset_addr = insn_get(s, OT_LONG);
4616
                } else {
4617
                    offset_addr = insn_get(s, OT_WORD);
4618
                }
4619
                gen_op_movl_A0_im(offset_addr);
4620
            }
4621
            gen_add_A0_ds_seg(s);
4622
            if ((b & 2) == 0) {
4623
                gen_op_ld_T0_A0(ot + s->mem_index);
4624
                gen_op_mov_reg_T0(ot, R_EAX);
4625
            } else {
4626
                gen_op_mov_TN_reg(ot, 0, R_EAX);
4627
                gen_op_st_T0_A0(ot + s->mem_index);
4628
            }
4629
        }
4630
        break;
4631
    case 0xd7: /* xlat */
4632
#ifdef TARGET_X86_64
4633
        if (s->aflag == 2) {
4634
            gen_op_movq_A0_reg(R_EBX);
4635
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
4636
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4637
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4638
        } else
4639
#endif
4640
        {
4641
            gen_op_movl_A0_reg(R_EBX);
4642
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
4643
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
4644
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
4645
            if (s->aflag == 0)
4646
                gen_op_andl_A0_ffff();
4647
            else
4648
                tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
4649
        }
4650
        gen_add_A0_ds_seg(s);
4651
        gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4652
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4653
        break;
4654
    case 0xb0 ... 0xb7: /* mov R, Ib */
4655
        val = insn_get(s, OT_BYTE);
4656
        gen_op_movl_T0_im(val);
4657
        gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4658
        break;
4659
    case 0xb8 ... 0xbf: /* mov R, Iv */
4660
#ifdef TARGET_X86_64
4661
        if (dflag == 2) {
4662
            uint64_t tmp;
4663
            /* 64 bit case */
4664
            tmp = ldq_code(s->pc);
4665
            s->pc += 8;
4666
            reg = (b & 7) | REX_B(s);
4667
            gen_movtl_T0_im(tmp);
4668
            gen_op_mov_reg_T0(OT_QUAD, reg);
4669
        } else
4670
#endif
4671
        {
4672
            ot = dflag ? OT_LONG : OT_WORD;
4673
            val = insn_get(s, ot);
4674
            reg = (b & 7) | REX_B(s);
4675
            gen_op_movl_T0_im(val);
4676
            gen_op_mov_reg_T0(ot, reg);
4677
        }
4678
        break;
4679

    
4680
    case 0x91 ... 0x97: /* xchg R, EAX */
4681
        ot = dflag + OT_WORD;
4682
        reg = (b & 7) | REX_B(s);
4683
        rm = R_EAX;
4684
        goto do_xchg_reg;
4685
    case 0x86:
4686
    case 0x87: /* xchg Ev, Gv */
4687
        if ((b & 1) == 0)
4688
            ot = OT_BYTE;
4689
        else
4690
            ot = dflag + OT_WORD;
4691
        modrm = ldub_code(s->pc++);
4692
        reg = ((modrm >> 3) & 7) | rex_r;
4693
        mod = (modrm >> 6) & 3;
4694
        if (mod == 3) {
4695
            rm = (modrm & 7) | REX_B(s);
4696
        do_xchg_reg:
4697
            gen_op_mov_TN_reg(ot, 0, reg);
4698
            gen_op_mov_TN_reg(ot, 1, rm);
4699
            gen_op_mov_reg_T0(ot, rm);
4700
            gen_op_mov_reg_T1(ot, reg);
4701
        } else {
4702
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4703
            gen_op_mov_TN_reg(ot, 0, reg);
4704
            /* for xchg, lock is implicit */
4705
            if (!(prefixes & PREFIX_LOCK))
4706
                tcg_gen_helper_0_0(helper_lock);
4707
            gen_op_ld_T1_A0(ot + s->mem_index);
4708
            gen_op_st_T0_A0(ot + s->mem_index);
4709
            if (!(prefixes & PREFIX_LOCK))
4710
                tcg_gen_helper_0_0(helper_unlock);
4711
            gen_op_mov_reg_T1(ot, reg);
4712
        }
4713
        break;
4714
    case 0xc4: /* les Gv */
4715
        if (CODE64(s))
4716
            goto illegal_op;
4717
        op = R_ES;
4718
        goto do_lxx;
4719
    case 0xc5: /* lds Gv */
4720
        if (CODE64(s))
4721
            goto illegal_op;
4722
        op = R_DS;
4723
        goto do_lxx;
4724
    case 0x1b2: /* lss Gv */
4725
        op = R_SS;
4726
        goto do_lxx;
4727
    case 0x1b4: /* lfs Gv */
4728
        op = R_FS;
4729
        goto do_lxx;
4730
    case 0x1b5: /* lgs Gv */
4731
        op = R_GS;
4732
    do_lxx:
4733
        ot = dflag ? OT_LONG : OT_WORD;
4734
        modrm = ldub_code(s->pc++);
4735
        reg = ((modrm >> 3) & 7) | rex_r;
4736
        mod = (modrm >> 6) & 3;
4737
        if (mod == 3)
4738
            goto illegal_op;
4739
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4740
        gen_op_ld_T1_A0(ot + s->mem_index);
4741
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4742
        /* load the segment first to handle exceptions properly */
4743
        gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4744
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4745
        /* then put the data */
4746
        gen_op_mov_reg_T1(ot, reg);
4747
        if (s->is_jmp) {
4748
            gen_jmp_im(s->pc - s->cs_base);
4749
            gen_eob(s);
4750
        }
4751
        break;
4752

    
4753
        /************************/
4754
        /* shifts */
4755
    case 0xc0:
4756
    case 0xc1:
4757
        /* shift Ev,Ib */
4758
        shift = 2;
4759
    grp2:
4760
        {
4761
            if ((b & 1) == 0)
4762
                ot = OT_BYTE;
4763
            else
4764
                ot = dflag + OT_WORD;
4765

    
4766
            modrm = ldub_code(s->pc++);
4767
            mod = (modrm >> 6) & 3;
4768
            op = (modrm >> 3) & 7;
4769

    
4770
            if (mod != 3) {
4771
                if (shift == 2) {
4772
                    s->rip_offset = 1;
4773
                }
4774
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4775
                opreg = OR_TMP0;
4776
            } else {
4777
                opreg = (modrm & 7) | REX_B(s);
4778
            }
4779

    
4780
            /* simpler op */
4781
            if (shift == 0) {
4782
                gen_shift(s, op, ot, opreg, OR_ECX);
4783
            } else {
4784
                if (shift == 2) {
4785
                    shift = ldub_code(s->pc++);
4786
                }
4787
                gen_shifti(s, op, ot, opreg, shift);
4788
            }
4789
        }
4790
        break;
4791
    case 0xd0:
4792
    case 0xd1:
4793
        /* shift Ev,1 */
4794
        shift = 1;
4795
        goto grp2;
4796
    case 0xd2:
4797
    case 0xd3:
4798
        /* shift Ev,cl */
4799
        shift = 0;
4800
        goto grp2;
4801

    
4802
    case 0x1a4: /* shld imm */
4803
        op = 0;
4804
        shift = 1;
4805
        goto do_shiftd;
4806
    case 0x1a5: /* shld cl */
4807
        op = 0;
4808
        shift = 0;
4809
        goto do_shiftd;
4810
    case 0x1ac: /* shrd imm */
4811
        op = 1;
4812
        shift = 1;
4813
        goto do_shiftd;
4814
    case 0x1ad: /* shrd cl */
4815
        op = 1;
4816
        shift = 0;
4817
    do_shiftd:
4818
        ot = dflag + OT_WORD;
4819
        modrm = ldub_code(s->pc++);
4820
        mod = (modrm >> 6) & 3;
4821
        rm = (modrm & 7) | REX_B(s);
4822
        reg = ((modrm >> 3) & 7) | rex_r;
4823
        if (mod != 3) {
4824
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4825
            opreg = OR_TMP0;
4826
        } else {
4827
            opreg = rm;
4828
        }
4829
        gen_op_mov_TN_reg(ot, 1, reg);
4830

    
4831
        if (shift) {
4832
            val = ldub_code(s->pc++);
4833
            tcg_gen_movi_tl(cpu_T3, val);
4834
        } else {
4835
            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
4836
        }
4837
        gen_shiftd_rm_T1_T3(s, ot, opreg, op);
4838
        break;
4839

    
4840
        /************************/
4841
        /* floats */
4842
    case 0xd8 ... 0xdf:
4843
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4844
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4845
            /* XXX: what to do if illegal op ? */
4846
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4847
            break;
4848
        }
4849
        modrm = ldub_code(s->pc++);
4850
        mod = (modrm >> 6) & 3;
4851
        rm = modrm & 7;
4852
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4853
        if (mod != 3) {
4854
            /* memory op */
4855
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4856
            switch(op) {
4857
            case 0x00 ... 0x07: /* fxxxs */
4858
            case 0x10 ... 0x17: /* fixxxl */
4859
            case 0x20 ... 0x27: /* fxxxl */
4860
            case 0x30 ... 0x37: /* fixxx */
4861
                {
4862
                    int op1;
4863
                    op1 = op & 7;
4864

    
4865
                    switch(op >> 4) {
4866
                    case 0:
4867
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4868
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4869
                        tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
4870
                        break;
4871
                    case 1:
4872
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4873
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4874
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4875
                        break;
4876
                    case 2:
4877
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
4878
                                          (s->mem_index >> 2) - 1);
4879
                        tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
4880
                        break;
4881
                    case 3:
4882
                    default:
4883
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4884
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4885
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4886
                        break;
4887
                    }
4888

    
4889
                    tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
4890
                    if (op1 == 3) {
4891
                        /* fcomp needs pop */
4892
                        tcg_gen_helper_0_0(helper_fpop);
4893
                    }
4894
                }
4895
                break;
4896
            case 0x08: /* flds */
4897
            case 0x0a: /* fsts */
4898
            case 0x0b: /* fstps */
4899
            case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4900
            case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4901
            case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4902
                switch(op & 7) {
4903
                case 0:
4904
                    switch(op >> 4) {
4905
                    case 0:
4906
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4907
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4908
                        tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
4909
                        break;
4910
                    case 1:
4911
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4912
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4913
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
4914
                        break;
4915
                    case 2:
4916
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
4917
                                          (s->mem_index >> 2) - 1);
4918
                        tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
4919
                        break;
4920
                    case 3:
4921
                    default:
4922
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4923
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4924
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
4925
                        break;
4926
                    }
4927
                    break;
4928
                case 1:
4929
                    /* XXX: the corresponding CPUID bit must be tested ! */
4930
                    switch(op >> 4) {
4931
                    case 1:
4932
                        tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
4933
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4934
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4935
                        break;
4936
                    case 2:
4937
                        tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
4938
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
4939
                                          (s->mem_index >> 2) - 1);
4940
                        break;
4941
                    case 3:
4942
                    default:
4943
                        tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
4944
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4945
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
4946
                        break;
4947
                    }
4948
                    tcg_gen_helper_0_0(helper_fpop);
4949
                    break;
4950
                default:
4951
                    switch(op >> 4) {
4952
                    case 0:
4953
                        tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
4954
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4955
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4956
                        break;
4957
                    case 1:
4958
                        tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
4959
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4960
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4961
                        break;
4962
                    case 2:
4963
                        tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
4964
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
4965
                                          (s->mem_index >> 2) - 1);
4966
                        break;
4967
                    case 3:
4968
                    default:
4969
                        tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
4970
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4971
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
4972
                        break;
4973
                    }
4974
                    if ((op & 7) == 3)
4975
                        tcg_gen_helper_0_0(helper_fpop);
4976
                    break;
4977
                }
4978
                break;
4979
            case 0x0c: /* fldenv mem */
4980
                if (s->cc_op != CC_OP_DYNAMIC)
4981
                    gen_op_set_cc_op(s->cc_op);
4982
                gen_jmp_im(pc_start - s->cs_base);
4983
                tcg_gen_helper_0_2(helper_fldenv, 
4984
                                   cpu_A0, tcg_const_i32(s->dflag));
4985
                break;
4986
            case 0x0d: /* fldcw mem */
4987
                gen_op_ld_T0_A0(OT_WORD + s->mem_index);
4988
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4989
                tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
4990
                break;
4991
            case 0x0e: /* fnstenv mem */
4992
                if (s->cc_op != CC_OP_DYNAMIC)
4993
                    gen_op_set_cc_op(s->cc_op);
4994
                gen_jmp_im(pc_start - s->cs_base);
4995
                tcg_gen_helper_0_2(helper_fstenv,
4996
                                   cpu_A0, tcg_const_i32(s->dflag));
4997
                break;
4998
            case 0x0f: /* fnstcw mem */
4999
                tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
5000
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5001
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
5002
                break;
5003
            case 0x1d: /* fldt mem */
5004
                if (s->cc_op != CC_OP_DYNAMIC)
5005
                    gen_op_set_cc_op(s->cc_op);
5006
                gen_jmp_im(pc_start - s->cs_base);
5007
                tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
5008
                break;
5009
            case 0x1f: /* fstpt mem */
5010
                if (s->cc_op != CC_OP_DYNAMIC)
5011
                    gen_op_set_cc_op(s->cc_op);
5012
                gen_jmp_im(pc_start - s->cs_base);
5013
                tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
5014
                tcg_gen_helper_0_0(helper_fpop);
5015
                break;
5016
            case 0x2c: /* frstor mem */
5017
                if (s->cc_op != CC_OP_DYNAMIC)
5018
                    gen_op_set_cc_op(s->cc_op);
5019
                gen_jmp_im(pc_start - s->cs_base);
5020
                tcg_gen_helper_0_2(helper_frstor,
5021
                                   cpu_A0, tcg_const_i32(s->dflag));
5022
                break;
5023
            case 0x2e: /* fnsave mem */
5024
                if (s->cc_op != CC_OP_DYNAMIC)
5025
                    gen_op_set_cc_op(s->cc_op);
5026
                gen_jmp_im(pc_start - s->cs_base);
5027
                tcg_gen_helper_0_2(helper_fsave,
5028
                                   cpu_A0, tcg_const_i32(s->dflag));
5029
                break;
5030
            case 0x2f: /* fnstsw mem */
5031
                tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5032
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5033
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
5034
                break;
5035
            case 0x3c: /* fbld */
5036
                if (s->cc_op != CC_OP_DYNAMIC)
5037
                    gen_op_set_cc_op(s->cc_op);
5038
                gen_jmp_im(pc_start - s->cs_base);
5039
                tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
5040
                break;
5041
            case 0x3e: /* fbstp */
5042
                if (s->cc_op != CC_OP_DYNAMIC)
5043
                    gen_op_set_cc_op(s->cc_op);
5044
                gen_jmp_im(pc_start - s->cs_base);
5045
                tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
5046
                tcg_gen_helper_0_0(helper_fpop);
5047
                break;
5048
            case 0x3d: /* fildll */
5049
                tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
5050
                                  (s->mem_index >> 2) - 1);
5051
                tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
5052
                break;
5053
            case 0x3f: /* fistpll */
5054
                tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
5055
                tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
5056
                                  (s->mem_index >> 2) - 1);
5057
                tcg_gen_helper_0_0(helper_fpop);
5058
                break;
5059
            default:
5060
                goto illegal_op;
5061
            }
5062
        } else {
5063
            /* register float ops */
5064
            opreg = rm;
5065

    
5066
            switch(op) {
5067
            case 0x08: /* fld sti */
5068
                tcg_gen_helper_0_0(helper_fpush);
5069
                tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
5070
                break;
5071
            case 0x09: /* fxchg sti */
5072
            case 0x29: /* fxchg4 sti, undocumented op */
5073
            case 0x39: /* fxchg7 sti, undocumented op */
5074
                tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
5075
                break;
5076
            case 0x0a: /* grp d9/2 */
5077
                switch(rm) {
5078
                case 0: /* fnop */
5079
                    /* check exceptions (FreeBSD FPU probe) */
5080
                    if (s->cc_op != CC_OP_DYNAMIC)
5081
                        gen_op_set_cc_op(s->cc_op);
5082
                    gen_jmp_im(pc_start - s->cs_base);
5083
                    tcg_gen_helper_0_0(helper_fwait);
5084
                    break;
5085
                default:
5086
                    goto illegal_op;
5087
                }
5088
                break;
5089
            case 0x0c: /* grp d9/4 */
5090
                switch(rm) {
5091
                case 0: /* fchs */
5092
                    tcg_gen_helper_0_0(helper_fchs_ST0);
5093
                    break;
5094
                case 1: /* fabs */
5095
                    tcg_gen_helper_0_0(helper_fabs_ST0);
5096
                    break;
5097
                case 4: /* ftst */
5098
                    tcg_gen_helper_0_0(helper_fldz_FT0);
5099
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5100
                    break;
5101
                case 5: /* fxam */
5102
                    tcg_gen_helper_0_0(helper_fxam_ST0);
5103
                    break;
5104
                default:
5105
                    goto illegal_op;
5106
                }
5107
                break;
5108
            case 0x0d: /* grp d9/5 */
5109
                {
5110
                    switch(rm) {
5111
                    case 0:
5112
                        tcg_gen_helper_0_0(helper_fpush);
5113
                        tcg_gen_helper_0_0(helper_fld1_ST0);
5114
                        break;
5115
                    case 1:
5116
                        tcg_gen_helper_0_0(helper_fpush);
5117
                        tcg_gen_helper_0_0(helper_fldl2t_ST0);
5118
                        break;
5119
                    case 2:
5120
                        tcg_gen_helper_0_0(helper_fpush);
5121
                        tcg_gen_helper_0_0(helper_fldl2e_ST0);
5122
                        break;
5123
                    case 3:
5124
                        tcg_gen_helper_0_0(helper_fpush);
5125
                        tcg_gen_helper_0_0(helper_fldpi_ST0);
5126
                        break;
5127
                    case 4:
5128
                        tcg_gen_helper_0_0(helper_fpush);
5129
                        tcg_gen_helper_0_0(helper_fldlg2_ST0);
5130
                        break;
5131
                    case 5:
5132
                        tcg_gen_helper_0_0(helper_fpush);
5133
                        tcg_gen_helper_0_0(helper_fldln2_ST0);
5134
                        break;
5135
                    case 6:
5136
                        tcg_gen_helper_0_0(helper_fpush);
5137
                        tcg_gen_helper_0_0(helper_fldz_ST0);
5138
                        break;
5139
                    default:
5140
                        goto illegal_op;
5141
                    }
5142
                }
5143
                break;
5144
            case 0x0e: /* grp d9/6 */
5145
                switch(rm) {
5146
                case 0: /* f2xm1 */
5147
                    tcg_gen_helper_0_0(helper_f2xm1);
5148
                    break;
5149
                case 1: /* fyl2x */
5150
                    tcg_gen_helper_0_0(helper_fyl2x);
5151
                    break;
5152
                case 2: /* fptan */
5153
                    tcg_gen_helper_0_0(helper_fptan);
5154
                    break;
5155
                case 3: /* fpatan */
5156
                    tcg_gen_helper_0_0(helper_fpatan);
5157
                    break;
5158
                case 4: /* fxtract */
5159
                    tcg_gen_helper_0_0(helper_fxtract);
5160
                    break;
5161
                case 5: /* fprem1 */
5162
                    tcg_gen_helper_0_0(helper_fprem1);
5163
                    break;
5164
                case 6: /* fdecstp */
5165
                    tcg_gen_helper_0_0(helper_fdecstp);
5166
                    break;
5167
                default:
5168
                case 7: /* fincstp */
5169
                    tcg_gen_helper_0_0(helper_fincstp);
5170
                    break;
5171
                }
5172
                break;
5173
            case 0x0f: /* grp d9/7 */
5174
                switch(rm) {
5175
                case 0: /* fprem */
5176
                    tcg_gen_helper_0_0(helper_fprem);
5177
                    break;
5178
                case 1: /* fyl2xp1 */
5179
                    tcg_gen_helper_0_0(helper_fyl2xp1);
5180
                    break;
5181
                case 2: /* fsqrt */
5182
                    tcg_gen_helper_0_0(helper_fsqrt);
5183
                    break;
5184
                case 3: /* fsincos */
5185
                    tcg_gen_helper_0_0(helper_fsincos);
5186
                    break;
5187
                case 5: /* fscale */
5188
                    tcg_gen_helper_0_0(helper_fscale);
5189
                    break;
5190
                case 4: /* frndint */
5191
                    tcg_gen_helper_0_0(helper_frndint);
5192
                    break;
5193
                case 6: /* fsin */
5194
                    tcg_gen_helper_0_0(helper_fsin);
5195
                    break;
5196
                default:
5197
                case 7: /* fcos */
5198
                    tcg_gen_helper_0_0(helper_fcos);
5199
                    break;
5200
                }
5201
                break;
5202
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
5203
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
5204
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
5205
                {
5206
                    int op1;
5207

    
5208
                    op1 = op & 7;
5209
                    if (op >= 0x20) {
5210
                        tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
5211
                        if (op >= 0x30)
5212
                            tcg_gen_helper_0_0(helper_fpop);
5213
                    } else {
5214
                        tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5215
                        tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
5216
                    }
5217
                }
5218
                break;
5219
            case 0x02: /* fcom */
5220
            case 0x22: /* fcom2, undocumented op */
5221
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5222
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5223
                break;
5224
            case 0x03: /* fcomp */
5225
            case 0x23: /* fcomp3, undocumented op */
5226
            case 0x32: /* fcomp5, undocumented op */
5227
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5228
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5229
                tcg_gen_helper_0_0(helper_fpop);
5230
                break;
5231
            case 0x15: /* da/5 */
5232
                switch(rm) {
5233
                case 1: /* fucompp */
5234
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5235
                    tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5236
                    tcg_gen_helper_0_0(helper_fpop);
5237
                    tcg_gen_helper_0_0(helper_fpop);
5238
                    break;
5239
                default:
5240
                    goto illegal_op;
5241
                }
5242
                break;
5243
            case 0x1c:
5244
                switch(rm) {
5245
                case 0: /* feni (287 only, just do nop here) */
5246
                    break;
5247
                case 1: /* fdisi (287 only, just do nop here) */
5248
                    break;
5249
                case 2: /* fclex */
5250
                    tcg_gen_helper_0_0(helper_fclex);
5251
                    break;
5252
                case 3: /* fninit */
5253
                    tcg_gen_helper_0_0(helper_fninit);
5254
                    break;
5255
                case 4: /* fsetpm (287 only, just do nop here) */
5256
                    break;
5257
                default:
5258
                    goto illegal_op;
5259
                }
5260
                break;
5261
            case 0x1d: /* fucomi */
5262
                if (s->cc_op != CC_OP_DYNAMIC)
5263
                    gen_op_set_cc_op(s->cc_op);
5264
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5265
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5266
                gen_op_fcomi_dummy();
5267
                s->cc_op = CC_OP_EFLAGS;
5268
                break;
5269
            case 0x1e: /* fcomi */
5270
                if (s->cc_op != CC_OP_DYNAMIC)
5271
                    gen_op_set_cc_op(s->cc_op);
5272
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5273
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5274
                gen_op_fcomi_dummy();
5275
                s->cc_op = CC_OP_EFLAGS;
5276
                break;
5277
            case 0x28: /* ffree sti */
5278
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5279
                break;
5280
            case 0x2a: /* fst sti */
5281
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5282
                break;
5283
            case 0x2b: /* fstp sti */
5284
            case 0x0b: /* fstp1 sti, undocumented op */
5285
            case 0x3a: /* fstp8 sti, undocumented op */
5286
            case 0x3b: /* fstp9 sti, undocumented op */
5287
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5288
                tcg_gen_helper_0_0(helper_fpop);
5289
                break;
5290
            case 0x2c: /* fucom st(i) */
5291
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5292
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5293
                break;
5294
            case 0x2d: /* fucomp st(i) */
5295
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5296
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5297
                tcg_gen_helper_0_0(helper_fpop);
5298
                break;
5299
            case 0x33: /* de/3 */
5300
                switch(rm) {
5301
                case 1: /* fcompp */
5302
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5303
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5304
                    tcg_gen_helper_0_0(helper_fpop);
5305
                    tcg_gen_helper_0_0(helper_fpop);
5306
                    break;
5307
                default:
5308
                    goto illegal_op;
5309
                }
5310
                break;
5311
            case 0x38: /* ffreep sti, undocumented op */
5312
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5313
                tcg_gen_helper_0_0(helper_fpop);
5314
                break;
5315
            case 0x3c: /* df/4 */
5316
                switch(rm) {
5317
                case 0:
5318
                    tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5319
                    tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5320
                    gen_op_mov_reg_T0(OT_WORD, R_EAX);
5321
                    break;
5322
                default:
5323
                    goto illegal_op;
5324
                }
5325
                break;
5326
            case 0x3d: /* fucomip */
5327
                if (s->cc_op != CC_OP_DYNAMIC)
5328
                    gen_op_set_cc_op(s->cc_op);
5329
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5330
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5331
                tcg_gen_helper_0_0(helper_fpop);
5332
                gen_op_fcomi_dummy();
5333
                s->cc_op = CC_OP_EFLAGS;
5334
                break;
5335
            case 0x3e: /* fcomip */
5336
                if (s->cc_op != CC_OP_DYNAMIC)
5337
                    gen_op_set_cc_op(s->cc_op);
5338
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5339
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5340
                tcg_gen_helper_0_0(helper_fpop);
5341
                gen_op_fcomi_dummy();
5342
                s->cc_op = CC_OP_EFLAGS;
5343
                break;
5344
            case 0x10 ... 0x13: /* fcmovxx */
5345
            case 0x18 ... 0x1b:
5346
                {
5347
                    int op1, l1;
5348
                    const static uint8_t fcmov_cc[8] = {
5349
                        (JCC_B << 1),
5350
                        (JCC_Z << 1),
5351
                        (JCC_BE << 1),
5352
                        (JCC_P << 1),
5353
                    };
5354
                    op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
5355
                    gen_setcc(s, op1);
5356
                    l1 = gen_new_label();
5357
                    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), l1);
5358
                    tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
5359
                    gen_set_label(l1);
5360
                }
5361
                break;
5362
            default:
5363
                goto illegal_op;
5364
            }
5365
        }
5366
        break;
5367
        /************************/
5368
        /* string ops */
5369

    
5370
    case 0xa4: /* movsS */
5371
    case 0xa5:
5372
        if ((b & 1) == 0)
5373
            ot = OT_BYTE;
5374
        else
5375
            ot = dflag + OT_WORD;
5376

    
5377
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5378
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5379
        } else {
5380
            gen_movs(s, ot);
5381
        }
5382
        break;
5383

    
5384
    case 0xaa: /* stosS */
5385
    case 0xab:
5386
        if ((b & 1) == 0)
5387
            ot = OT_BYTE;
5388
        else
5389
            ot = dflag + OT_WORD;
5390

    
5391
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5392
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5393
        } else {
5394
            gen_stos(s, ot);
5395
        }
5396
        break;
5397
    case 0xac: /* lodsS */
5398
    case 0xad:
5399
        if ((b & 1) == 0)
5400
            ot = OT_BYTE;
5401
        else
5402
            ot = dflag + OT_WORD;
5403
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5404
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5405
        } else {
5406
            gen_lods(s, ot);
5407
        }
5408
        break;
5409
    case 0xae: /* scasS */
5410
    case 0xaf:
5411
        if ((b & 1) == 0)
5412
            ot = OT_BYTE;
5413
        else
5414
            ot = dflag + OT_WORD;
5415
        if (prefixes & PREFIX_REPNZ) {
5416
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5417
        } else if (prefixes & PREFIX_REPZ) {
5418
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5419
        } else {
5420
            gen_scas(s, ot);
5421
            s->cc_op = CC_OP_SUBB + ot;
5422
        }
5423
        break;
5424

    
5425
    case 0xa6: /* cmpsS */
5426
    case 0xa7:
5427
        if ((b & 1) == 0)
5428
            ot = OT_BYTE;
5429
        else
5430
            ot = dflag + OT_WORD;
5431
        if (prefixes & PREFIX_REPNZ) {
5432
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5433
        } else if (prefixes & PREFIX_REPZ) {
5434
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5435
        } else {
5436
            gen_cmps(s, ot);
5437
            s->cc_op = CC_OP_SUBB + ot;
5438
        }
5439
        break;
5440
    case 0x6c: /* insS */
5441
    case 0x6d:
5442
        if ((b & 1) == 0)
5443
            ot = OT_BYTE;
5444
        else
5445
            ot = dflag ? OT_LONG : OT_WORD;
5446
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5447
        gen_op_andl_T0_ffff();
5448
        gen_check_io(s, ot, pc_start - s->cs_base, 
5449
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
5450
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5451
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5452
        } else {
5453
            gen_ins(s, ot);
5454
        }
5455
        break;
5456
    case 0x6e: /* outsS */
5457
    case 0x6f:
5458
        if ((b & 1) == 0)
5459
            ot = OT_BYTE;
5460
        else
5461
            ot = dflag ? OT_LONG : OT_WORD;
5462
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5463
        gen_op_andl_T0_ffff();
5464
        gen_check_io(s, ot, pc_start - s->cs_base,
5465
                     svm_is_rep(prefixes) | 4);
5466
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5467
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5468
        } else {
5469
            gen_outs(s, ot);
5470
        }
5471
        break;
5472

    
5473
        /************************/
5474
        /* port I/O */
5475

    
5476
    case 0xe4:
5477
    case 0xe5:
5478
        if ((b & 1) == 0)
5479
            ot = OT_BYTE;
5480
        else
5481
            ot = dflag ? OT_LONG : OT_WORD;
5482
        val = ldub_code(s->pc++);
5483
        gen_op_movl_T0_im(val);
5484
        gen_check_io(s, ot, pc_start - s->cs_base,
5485
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5486
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5487
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5488
        gen_op_mov_reg_T1(ot, R_EAX);
5489
        break;
5490
    case 0xe6:
5491
    case 0xe7:
5492
        if ((b & 1) == 0)
5493
            ot = OT_BYTE;
5494
        else
5495
            ot = dflag ? OT_LONG : OT_WORD;
5496
        val = ldub_code(s->pc++);
5497
        gen_op_movl_T0_im(val);
5498
        gen_check_io(s, ot, pc_start - s->cs_base,
5499
                     svm_is_rep(prefixes));
5500
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5501

    
5502
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5503
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5504
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5505
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5506
        break;
5507
    case 0xec:
5508
    case 0xed:
5509
        if ((b & 1) == 0)
5510
            ot = OT_BYTE;
5511
        else
5512
            ot = dflag ? OT_LONG : OT_WORD;
5513
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5514
        gen_op_andl_T0_ffff();
5515
        gen_check_io(s, ot, pc_start - s->cs_base,
5516
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5517
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5518
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5519
        gen_op_mov_reg_T1(ot, R_EAX);
5520
        break;
5521
    case 0xee:
5522
    case 0xef:
5523
        if ((b & 1) == 0)
5524
            ot = OT_BYTE;
5525
        else
5526
            ot = dflag ? OT_LONG : OT_WORD;
5527
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5528
        gen_op_andl_T0_ffff();
5529
        gen_check_io(s, ot, pc_start - s->cs_base,
5530
                     svm_is_rep(prefixes));
5531
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5532

    
5533
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5534
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5535
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5536
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5537
        break;
5538

    
5539
        /************************/
5540
        /* control */
5541
    case 0xc2: /* ret im */
5542
        val = ldsw_code(s->pc);
5543
        s->pc += 2;
5544
        gen_pop_T0(s);
5545
        if (CODE64(s) && s->dflag)
5546
            s->dflag = 2;
5547
        gen_stack_update(s, val + (2 << s->dflag));
5548
        if (s->dflag == 0)
5549
            gen_op_andl_T0_ffff();
5550
        gen_op_jmp_T0();
5551
        gen_eob(s);
5552
        break;
5553
    case 0xc3: /* ret */
5554
        gen_pop_T0(s);
5555
        gen_pop_update(s);
5556
        if (s->dflag == 0)
5557
            gen_op_andl_T0_ffff();
5558
        gen_op_jmp_T0();
5559
        gen_eob(s);
5560
        break;
5561
    case 0xca: /* lret im */
5562
        val = ldsw_code(s->pc);
5563
        s->pc += 2;
5564
    do_lret:
5565
        if (s->pe && !s->vm86) {
5566
            if (s->cc_op != CC_OP_DYNAMIC)
5567
                gen_op_set_cc_op(s->cc_op);
5568
            gen_jmp_im(pc_start - s->cs_base);
5569
            tcg_gen_helper_0_2(helper_lret_protected,
5570
                               tcg_const_i32(s->dflag), 
5571
                               tcg_const_i32(val));
5572
        } else {
5573
            gen_stack_A0(s);
5574
            /* pop offset */
5575
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5576
            if (s->dflag == 0)
5577
                gen_op_andl_T0_ffff();
5578
            /* NOTE: keeping EIP updated is not a problem in case of
5579
               exception */
5580
            gen_op_jmp_T0();
5581
            /* pop selector */
5582
            gen_op_addl_A0_im(2 << s->dflag);
5583
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5584
            gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5585
            /* add stack offset */
5586
            gen_stack_update(s, val + (4 << s->dflag));
5587
        }
5588
        gen_eob(s);
5589
        break;
5590
    case 0xcb: /* lret */
5591
        val = 0;
5592
        goto do_lret;
5593
    case 0xcf: /* iret */
5594
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5595
            break;
5596
        if (!s->pe) {
5597
            /* real mode */
5598
            tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5599
            s->cc_op = CC_OP_EFLAGS;
5600
        } else if (s->vm86) {
5601
            if (s->iopl != 3) {
5602
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5603
            } else {
5604
                tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5605
                s->cc_op = CC_OP_EFLAGS;
5606
            }
5607
        } else {
5608
            if (s->cc_op != CC_OP_DYNAMIC)
5609
                gen_op_set_cc_op(s->cc_op);
5610
            gen_jmp_im(pc_start - s->cs_base);
5611
            tcg_gen_helper_0_2(helper_iret_protected,
5612
                               tcg_const_i32(s->dflag), 
5613
                               tcg_const_i32(s->pc - s->cs_base));
5614
            s->cc_op = CC_OP_EFLAGS;
5615
        }
5616
        gen_eob(s);
5617
        break;
5618
    case 0xe8: /* call im */
5619
        {
5620
            if (dflag)
5621
                tval = (int32_t)insn_get(s, OT_LONG);
5622
            else
5623
                tval = (int16_t)insn_get(s, OT_WORD);
5624
            next_eip = s->pc - s->cs_base;
5625
            tval += next_eip;
5626
            if (s->dflag == 0)
5627
                tval &= 0xffff;
5628
            gen_movtl_T0_im(next_eip);
5629
            gen_push_T0(s);
5630
            gen_jmp(s, tval);
5631
        }
5632
        break;
5633
    case 0x9a: /* lcall im */
5634
        {
5635
            unsigned int selector, offset;
5636

    
5637
            if (CODE64(s))
5638
                goto illegal_op;
5639
            ot = dflag ? OT_LONG : OT_WORD;
5640
            offset = insn_get(s, ot);
5641
            selector = insn_get(s, OT_WORD);
5642

    
5643
            gen_op_movl_T0_im(selector);
5644
            gen_op_movl_T1_imu(offset);
5645
        }
5646
        goto do_lcall;
5647
    case 0xe9: /* jmp im */
5648
        if (dflag)
5649
            tval = (int32_t)insn_get(s, OT_LONG);
5650
        else
5651
            tval = (int16_t)insn_get(s, OT_WORD);
5652
        tval += s->pc - s->cs_base;
5653
        if (s->dflag == 0)
5654
            tval &= 0xffff;
5655
        gen_jmp(s, tval);
5656
        break;
5657
    case 0xea: /* ljmp im */
5658
        {
5659
            unsigned int selector, offset;
5660

    
5661
            if (CODE64(s))
5662
                goto illegal_op;
5663
            ot = dflag ? OT_LONG : OT_WORD;
5664
            offset = insn_get(s, ot);
5665
            selector = insn_get(s, OT_WORD);
5666

    
5667
            gen_op_movl_T0_im(selector);
5668
            gen_op_movl_T1_imu(offset);
5669
        }
5670
        goto do_ljmp;
5671
    case 0xeb: /* jmp Jb */
5672
        tval = (int8_t)insn_get(s, OT_BYTE);
5673
        tval += s->pc - s->cs_base;
5674
        if (s->dflag == 0)
5675
            tval &= 0xffff;
5676
        gen_jmp(s, tval);
5677
        break;
5678
    case 0x70 ... 0x7f: /* jcc Jb */
5679
        tval = (int8_t)insn_get(s, OT_BYTE);
5680
        goto do_jcc;
5681
    case 0x180 ... 0x18f: /* jcc Jv */
5682
        if (dflag) {
5683
            tval = (int32_t)insn_get(s, OT_LONG);
5684
        } else {
5685
            tval = (int16_t)insn_get(s, OT_WORD);
5686
        }
5687
    do_jcc:
5688
        next_eip = s->pc - s->cs_base;
5689
        tval += next_eip;
5690
        if (s->dflag == 0)
5691
            tval &= 0xffff;
5692
        gen_jcc(s, b, tval, next_eip);
5693
        break;
5694

    
5695
    case 0x190 ... 0x19f: /* setcc Gv */
5696
        modrm = ldub_code(s->pc++);
5697
        gen_setcc(s, b);
5698
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5699
        break;
5700
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
5701
        ot = dflag + OT_WORD;
5702
        modrm = ldub_code(s->pc++);
5703
        reg = ((modrm >> 3) & 7) | rex_r;
5704
        mod = (modrm >> 6) & 3;
5705
        gen_setcc(s, b);
5706
        if (mod != 3) {
5707
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5708
            gen_op_ld_T1_A0(ot + s->mem_index);
5709
        } else {
5710
            rm = (modrm & 7) | REX_B(s);
5711
            gen_op_mov_TN_reg(ot, 1, rm);
5712
        }
5713
        gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5714
        break;
5715

    
5716
        /************************/
5717
        /* flags */
5718
    case 0x9c: /* pushf */
5719
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5720
            break;
5721
        if (s->vm86 && s->iopl != 3) {
5722
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5723
        } else {
5724
            if (s->cc_op != CC_OP_DYNAMIC)
5725
                gen_op_set_cc_op(s->cc_op);
5726
            gen_op_movl_T0_eflags();
5727
            gen_push_T0(s);
5728
        }
5729
        break;
5730
    case 0x9d: /* popf */
5731
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5732
            break;
5733
        if (s->vm86 && s->iopl != 3) {
5734
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5735
        } else {
5736
            gen_pop_T0(s);
5737
            if (s->cpl == 0) {
5738
                if (s->dflag) {
5739
                    gen_op_movl_eflags_T0_cpl0();
5740
                } else {
5741
                    gen_op_movw_eflags_T0_cpl0();
5742
                }
5743
            } else {
5744
                if (s->cpl <= s->iopl) {
5745
                    if (s->dflag) {
5746
                        gen_op_movl_eflags_T0_io();
5747
                    } else {
5748
                        gen_op_movw_eflags_T0_io();
5749
                    }
5750
                } else {
5751
                    if (s->dflag) {
5752
                        gen_op_movl_eflags_T0();
5753
                    } else {
5754
                        gen_op_movw_eflags_T0();
5755
                    }
5756
                }
5757
            }
5758
            gen_pop_update(s);
5759
            s->cc_op = CC_OP_EFLAGS;
5760
            /* abort translation because TF flag may change */
5761
            gen_jmp_im(s->pc - s->cs_base);
5762
            gen_eob(s);
5763
        }
5764
        break;
5765
    case 0x9e: /* sahf */
5766
        if (CODE64(s))
5767
            goto illegal_op;
5768
        gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5769
        if (s->cc_op != CC_OP_DYNAMIC)
5770
            gen_op_set_cc_op(s->cc_op);
5771
        gen_op_movb_eflags_T0();
5772
        s->cc_op = CC_OP_EFLAGS;
5773
        break;
5774
    case 0x9f: /* lahf */
5775
        if (CODE64(s))
5776
            goto illegal_op;
5777
        if (s->cc_op != CC_OP_DYNAMIC)
5778
            gen_op_set_cc_op(s->cc_op);
5779
        gen_op_movl_T0_eflags();
5780
        gen_op_mov_reg_T0(OT_BYTE, R_AH);
5781
        break;
5782
    case 0xf5: /* cmc */
5783
        if (s->cc_op != CC_OP_DYNAMIC)
5784
            gen_op_set_cc_op(s->cc_op);
5785
        gen_op_cmc();
5786
        s->cc_op = CC_OP_EFLAGS;
5787
        break;
5788
    case 0xf8: /* clc */
5789
        if (s->cc_op != CC_OP_DYNAMIC)
5790
            gen_op_set_cc_op(s->cc_op);
5791
        gen_op_clc();
5792
        s->cc_op = CC_OP_EFLAGS;
5793
        break;
5794
    case 0xf9: /* stc */
5795
        if (s->cc_op != CC_OP_DYNAMIC)
5796
            gen_op_set_cc_op(s->cc_op);
5797
        gen_op_stc();
5798
        s->cc_op = CC_OP_EFLAGS;
5799
        break;
5800
    case 0xfc: /* cld */
5801
        tcg_gen_movi_i32(cpu_tmp2_i32, 1);
5802
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5803
        break;
5804
    case 0xfd: /* std */
5805
        tcg_gen_movi_i32(cpu_tmp2_i32, -1);
5806
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5807
        break;
5808

    
5809
        /************************/
5810
        /* bit operations */
5811
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
5812
        ot = dflag + OT_WORD;
5813
        modrm = ldub_code(s->pc++);
5814
        op = (modrm >> 3) & 7;
5815
        mod = (modrm >> 6) & 3;
5816
        rm = (modrm & 7) | REX_B(s);
5817
        if (mod != 3) {
5818
            s->rip_offset = 1;
5819
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5820
            gen_op_ld_T0_A0(ot + s->mem_index);
5821
        } else {
5822
            gen_op_mov_TN_reg(ot, 0, rm);
5823
        }
5824
        /* load shift */
5825
        val = ldub_code(s->pc++);
5826
        gen_op_movl_T1_im(val);
5827
        if (op < 4)
5828
            goto illegal_op;
5829
        op -= 4;
5830
        goto bt_op;
5831
    case 0x1a3: /* bt Gv, Ev */
5832
        op = 0;
5833
        goto do_btx;
5834
    case 0x1ab: /* bts */
5835
        op = 1;
5836
        goto do_btx;
5837
    case 0x1b3: /* btr */
5838
        op = 2;
5839
        goto do_btx;
5840
    case 0x1bb: /* btc */
5841
        op = 3;
5842
    do_btx:
5843
        ot = dflag + OT_WORD;
5844
        modrm = ldub_code(s->pc++);
5845
        reg = ((modrm >> 3) & 7) | rex_r;
5846
        mod = (modrm >> 6) & 3;
5847
        rm = (modrm & 7) | REX_B(s);
5848
        gen_op_mov_TN_reg(OT_LONG, 1, reg);
5849
        if (mod != 3) {
5850
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5851
            /* specific case: we need to add a displacement */
5852
            gen_exts(ot, cpu_T[1]);
5853
            tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
5854
            tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
5855
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
5856
            gen_op_ld_T0_A0(ot + s->mem_index);
5857
        } else {
5858
            gen_op_mov_TN_reg(ot, 0, rm);
5859
        }
5860
    bt_op:
5861
        tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
5862
        switch(op) {
5863
        case 0:
5864
            tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
5865
            tcg_gen_movi_tl(cpu_cc_dst, 0);
5866
            break;
5867
        case 1:
5868
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5869
            tcg_gen_movi_tl(cpu_tmp0, 1);
5870
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5871
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5872
            break;
5873
        case 2:
5874
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5875
            tcg_gen_movi_tl(cpu_tmp0, 1);
5876
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5877
            tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
5878
            tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5879
            break;
5880
        default:
5881
        case 3:
5882
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
5883
            tcg_gen_movi_tl(cpu_tmp0, 1);
5884
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
5885
            tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
5886
            break;
5887
        }
5888
        s->cc_op = CC_OP_SARB + ot;
5889
        if (op != 0) {
5890
            if (mod != 3)
5891
                gen_op_st_T0_A0(ot + s->mem_index);
5892
            else
5893
                gen_op_mov_reg_T0(ot, rm);
5894
            tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
5895
            tcg_gen_movi_tl(cpu_cc_dst, 0);
5896
        }
5897
        break;
5898
    case 0x1bc: /* bsf */
5899
    case 0x1bd: /* bsr */
5900
        {
5901
            int label1;
5902
            ot = dflag + OT_WORD;
5903
            modrm = ldub_code(s->pc++);
5904
            reg = ((modrm >> 3) & 7) | rex_r;
5905
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5906
            gen_extu(ot, cpu_T[0]);
5907
            label1 = gen_new_label();
5908
            tcg_gen_movi_tl(cpu_cc_dst, 0);
5909
            tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), label1);
5910
            if (b & 1) {
5911
                tcg_gen_helper_1_1(helper_bsr, cpu_T[0], cpu_T[0]);
5912
            } else {
5913
                tcg_gen_helper_1_1(helper_bsf, cpu_T[0], cpu_T[0]);
5914
            }
5915
            gen_op_mov_reg_T0(ot, reg);
5916
            tcg_gen_movi_tl(cpu_cc_dst, 1);
5917
            gen_set_label(label1);
5918
            tcg_gen_discard_tl(cpu_cc_src);
5919
            s->cc_op = CC_OP_LOGICB + ot;
5920
        }
5921
        break;
5922
        /************************/
5923
        /* bcd */
5924
    case 0x27: /* daa */
5925
        if (CODE64(s))
5926
            goto illegal_op;
5927
        if (s->cc_op != CC_OP_DYNAMIC)
5928
            gen_op_set_cc_op(s->cc_op);
5929
        tcg_gen_helper_0_0(helper_daa);
5930
        s->cc_op = CC_OP_EFLAGS;
5931
        break;
5932
    case 0x2f: /* das */
5933
        if (CODE64(s))
5934
            goto illegal_op;
5935
        if (s->cc_op != CC_OP_DYNAMIC)
5936
            gen_op_set_cc_op(s->cc_op);
5937
        tcg_gen_helper_0_0(helper_das);
5938
        s->cc_op = CC_OP_EFLAGS;
5939
        break;
5940
    case 0x37: /* aaa */
5941
        if (CODE64(s))
5942
            goto illegal_op;
5943
        if (s->cc_op != CC_OP_DYNAMIC)
5944
            gen_op_set_cc_op(s->cc_op);
5945
        tcg_gen_helper_0_0(helper_aaa);
5946
        s->cc_op = CC_OP_EFLAGS;
5947
        break;
5948
    case 0x3f: /* aas */
5949
        if (CODE64(s))
5950
            goto illegal_op;
5951
        if (s->cc_op != CC_OP_DYNAMIC)
5952
            gen_op_set_cc_op(s->cc_op);
5953
        tcg_gen_helper_0_0(helper_aas);
5954
        s->cc_op = CC_OP_EFLAGS;
5955
        break;
5956
    case 0xd4: /* aam */
5957
        if (CODE64(s))
5958
            goto illegal_op;
5959
        val = ldub_code(s->pc++);
5960
        if (val == 0) {
5961
            gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5962
        } else {
5963
            tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
5964
            s->cc_op = CC_OP_LOGICB;
5965
        }
5966
        break;
5967
    case 0xd5: /* aad */
5968
        if (CODE64(s))
5969
            goto illegal_op;
5970
        val = ldub_code(s->pc++);
5971
        tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
5972
        s->cc_op = CC_OP_LOGICB;
5973
        break;
5974
        /************************/
5975
        /* misc */
5976
    case 0x90: /* nop */
5977
        /* XXX: xchg + rex handling */
5978
        /* XXX: correct lock test for all insn */
5979
        if (prefixes & PREFIX_LOCK)
5980
            goto illegal_op;
5981
        if (prefixes & PREFIX_REPZ) {
5982
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
5983
        }
5984
        break;
5985
    case 0x9b: /* fwait */
5986
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5987
            (HF_MP_MASK | HF_TS_MASK)) {
5988
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5989
        } else {
5990
            if (s->cc_op != CC_OP_DYNAMIC)
5991
                gen_op_set_cc_op(s->cc_op);
5992
            gen_jmp_im(pc_start - s->cs_base);
5993
            tcg_gen_helper_0_0(helper_fwait);
5994
        }
5995
        break;
5996
    case 0xcc: /* int3 */
5997
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5998
            break;
5999
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
6000
        break;
6001
    case 0xcd: /* int N */
6002
        val = ldub_code(s->pc++);
6003
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
6004
            break;
6005
        if (s->vm86 && s->iopl != 3) {
6006
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6007
        } else {
6008
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
6009
        }
6010
        break;
6011
    case 0xce: /* into */
6012
        if (CODE64(s))
6013
            goto illegal_op;
6014
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
6015
            break;
6016
        if (s->cc_op != CC_OP_DYNAMIC)
6017
            gen_op_set_cc_op(s->cc_op);
6018
        gen_jmp_im(pc_start - s->cs_base);
6019
        tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
6020
        break;
6021
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
6022
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
6023
            break;
6024
#if 1
6025
        gen_debug(s, pc_start - s->cs_base);
6026
#else
6027
        /* start debug */
6028
        tb_flush(cpu_single_env);
6029
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
6030
#endif
6031
        break;
6032
    case 0xfa: /* cli */
6033
        if (!s->vm86) {
6034
            if (s->cpl <= s->iopl) {
6035
                tcg_gen_helper_0_0(helper_cli);
6036
            } else {
6037
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6038
            }
6039
        } else {
6040
            if (s->iopl == 3) {
6041
                tcg_gen_helper_0_0(helper_cli);
6042
            } else {
6043
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6044
            }
6045
        }
6046
        break;
6047
    case 0xfb: /* sti */
6048
        if (!s->vm86) {
6049
            if (s->cpl <= s->iopl) {
6050
            gen_sti:
6051
                tcg_gen_helper_0_0(helper_sti);
6052
                /* interruptions are enabled only the first insn after sti */
6053
                /* If several instructions disable interrupts, only the
6054
                   _first_ does it */
6055
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
6056
                    tcg_gen_helper_0_0(helper_set_inhibit_irq);
6057
                /* give a chance to handle pending irqs */
6058
                gen_jmp_im(s->pc - s->cs_base);
6059
                gen_eob(s);
6060
            } else {
6061
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6062
            }
6063
        } else {
6064
            if (s->iopl == 3) {
6065
                goto gen_sti;
6066
            } else {
6067
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6068
            }
6069
        }
6070
        break;
6071
    case 0x62: /* bound */
6072
        if (CODE64(s))
6073
            goto illegal_op;
6074
        ot = dflag ? OT_LONG : OT_WORD;
6075
        modrm = ldub_code(s->pc++);
6076
        reg = (modrm >> 3) & 7;
6077
        mod = (modrm >> 6) & 3;
6078
        if (mod == 3)
6079
            goto illegal_op;
6080
        gen_op_mov_TN_reg(ot, 0, reg);
6081
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6082
        gen_jmp_im(pc_start - s->cs_base);
6083
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6084
        if (ot == OT_WORD)
6085
            tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
6086
        else
6087
            tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
6088
        break;
6089
    case 0x1c8 ... 0x1cf: /* bswap reg */
6090
        reg = (b & 7) | REX_B(s);
6091
#ifdef TARGET_X86_64
6092
        if (dflag == 2) {
6093
            gen_op_mov_TN_reg(OT_QUAD, 0, reg);
6094
            tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
6095
            gen_op_mov_reg_T0(OT_QUAD, reg);
6096
        } else
6097
        {
6098
            TCGv tmp0;
6099
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
6100
            
6101
            tmp0 = tcg_temp_new(TCG_TYPE_I32);
6102
            tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
6103
            tcg_gen_bswap_i32(tmp0, tmp0);
6104
            tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
6105
            gen_op_mov_reg_T0(OT_LONG, reg);
6106
        }
6107
#else
6108
        {
6109
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
6110
            tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
6111
            gen_op_mov_reg_T0(OT_LONG, reg);
6112
        }
6113
#endif
6114
        break;
6115
    case 0xd6: /* salc */
6116
        if (CODE64(s))
6117
            goto illegal_op;
6118
        if (s->cc_op != CC_OP_DYNAMIC)
6119
            gen_op_set_cc_op(s->cc_op);
6120
        gen_op_salc();
6121
        break;
6122
    case 0xe0: /* loopnz */
6123
    case 0xe1: /* loopz */
6124
    case 0xe2: /* loop */
6125
    case 0xe3: /* jecxz */
6126
        {
6127
            int l1, l2, l3;
6128

    
6129
            tval = (int8_t)insn_get(s, OT_BYTE);
6130
            next_eip = s->pc - s->cs_base;
6131
            tval += next_eip;
6132
            if (s->dflag == 0)
6133
                tval &= 0xffff;
6134

    
6135
            l1 = gen_new_label();
6136
            l2 = gen_new_label();
6137
            l3 = gen_new_label();
6138
            b &= 3;
6139
            switch(b) {
6140
            case 0: /* loopnz */
6141
            case 1: /* loopz */
6142
                if (s->cc_op != CC_OP_DYNAMIC)
6143
                    gen_op_set_cc_op(s->cc_op);
6144
                gen_op_add_reg_im(s->aflag, R_ECX, -1);
6145
                gen_op_jz_ecx(s->aflag, l3);
6146
                gen_compute_eflags(cpu_tmp0);
6147
                tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
6148
                if (b == 0) {
6149
                    tcg_gen_brcond_tl(TCG_COND_EQ, 
6150
                                      cpu_tmp0, tcg_const_tl(0), l1);
6151
                } else {
6152
                    tcg_gen_brcond_tl(TCG_COND_NE, 
6153
                                      cpu_tmp0, tcg_const_tl(0), l1);
6154
                }
6155
                break;
6156
            case 2: /* loop */
6157
                gen_op_add_reg_im(s->aflag, R_ECX, -1);
6158
                gen_op_jnz_ecx(s->aflag, l1);
6159
                break;
6160
            default:
6161
            case 3: /* jcxz */
6162
                gen_op_jz_ecx(s->aflag, l1);
6163
                break;
6164
            }
6165

    
6166
            gen_set_label(l3);
6167
            gen_jmp_im(next_eip);
6168
            gen_op_jmp_label(l2);
6169

    
6170
            gen_set_label(l1);
6171
            gen_jmp_im(tval);
6172
            gen_set_label(l2);
6173
            gen_eob(s);
6174
        }
6175
        break;
6176
    case 0x130: /* wrmsr */
6177
    case 0x132: /* rdmsr */
6178
        if (s->cpl != 0) {
6179
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6180
        } else {
6181
            int retval = 0;
6182
            if (b & 2) {
6183
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
6184
                tcg_gen_helper_0_0(helper_rdmsr);
6185
            } else {
6186
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
6187
                tcg_gen_helper_0_0(helper_wrmsr);
6188
            }
6189
            if(retval)
6190
                gen_eob(s);
6191
        }
6192
        break;
6193
    case 0x131: /* rdtsc */
6194
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
6195
            break;
6196
        gen_jmp_im(pc_start - s->cs_base);
6197
        tcg_gen_helper_0_0(helper_rdtsc);
6198
        break;
6199
    case 0x133: /* rdpmc */
6200
        gen_jmp_im(pc_start - s->cs_base);
6201
        tcg_gen_helper_0_0(helper_rdpmc);
6202
        break;
6203
    case 0x134: /* sysenter */
6204
        if (CODE64(s))
6205
            goto illegal_op;
6206
        if (!s->pe) {
6207
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6208
        } else {
6209
            if (s->cc_op != CC_OP_DYNAMIC) {
6210
                gen_op_set_cc_op(s->cc_op);
6211
                s->cc_op = CC_OP_DYNAMIC;
6212
            }
6213
            gen_jmp_im(pc_start - s->cs_base);
6214
            tcg_gen_helper_0_0(helper_sysenter);
6215
            gen_eob(s);
6216
        }
6217
        break;
6218
    case 0x135: /* sysexit */
6219
        if (CODE64(s))
6220
            goto illegal_op;
6221
        if (!s->pe) {
6222
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6223
        } else {
6224
            if (s->cc_op != CC_OP_DYNAMIC) {
6225
                gen_op_set_cc_op(s->cc_op);
6226
                s->cc_op = CC_OP_DYNAMIC;
6227
            }
6228
            gen_jmp_im(pc_start - s->cs_base);
6229
            tcg_gen_helper_0_0(helper_sysexit);
6230
            gen_eob(s);
6231
        }
6232
        break;
6233
#ifdef TARGET_X86_64
6234
    case 0x105: /* syscall */
6235
        /* XXX: is it usable in real mode ? */
6236
        if (s->cc_op != CC_OP_DYNAMIC) {
6237
            gen_op_set_cc_op(s->cc_op);
6238
            s->cc_op = CC_OP_DYNAMIC;
6239
        }
6240
        gen_jmp_im(pc_start - s->cs_base);
6241
        tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
6242
        gen_eob(s);
6243
        break;
6244
    case 0x107: /* sysret */
6245
        if (!s->pe) {
6246
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6247
        } else {
6248
            if (s->cc_op != CC_OP_DYNAMIC) {
6249
                gen_op_set_cc_op(s->cc_op);
6250
                s->cc_op = CC_OP_DYNAMIC;
6251
            }
6252
            gen_jmp_im(pc_start - s->cs_base);
6253
            tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
6254
            /* condition codes are modified only in long mode */
6255
            if (s->lma)
6256
                s->cc_op = CC_OP_EFLAGS;
6257
            gen_eob(s);
6258
        }
6259
        break;
6260
#endif
6261
    case 0x1a2: /* cpuid */
6262
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
6263
            break;
6264
        tcg_gen_helper_0_0(helper_cpuid);
6265
        break;
6266
    case 0xf4: /* hlt */
6267
        if (s->cpl != 0) {
6268
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6269
        } else {
6270
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
6271
                break;
6272
            if (s->cc_op != CC_OP_DYNAMIC)
6273
                gen_op_set_cc_op(s->cc_op);
6274
            gen_jmp_im(s->pc - s->cs_base);
6275
            tcg_gen_helper_0_0(helper_hlt);
6276
            s->is_jmp = 3;
6277
        }
6278
        break;
6279
    case 0x100:
6280
        modrm = ldub_code(s->pc++);
6281
        mod = (modrm >> 6) & 3;
6282
        op = (modrm >> 3) & 7;
6283
        switch(op) {
6284
        case 0: /* sldt */
6285
            if (!s->pe || s->vm86)
6286
                goto illegal_op;
6287
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
6288
                break;
6289
            gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
6290
            ot = OT_WORD;
6291
            if (mod == 3)
6292
                ot += s->dflag;
6293
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6294
            break;
6295
        case 2: /* lldt */
6296
            if (!s->pe || s->vm86)
6297
                goto illegal_op;
6298
            if (s->cpl != 0) {
6299
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6300
            } else {
6301
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
6302
                    break;
6303
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6304
                gen_jmp_im(pc_start - s->cs_base);
6305
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6306
                tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
6307
            }
6308
            break;
6309
        case 1: /* str */
6310
            if (!s->pe || s->vm86)
6311
                goto illegal_op;
6312
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
6313
                break;
6314
            gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
6315
            ot = OT_WORD;
6316
            if (mod == 3)
6317
                ot += s->dflag;
6318
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6319
            break;
6320
        case 3: /* ltr */
6321
            if (!s->pe || s->vm86)
6322
                goto illegal_op;
6323
            if (s->cpl != 0) {
6324
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6325
            } else {
6326
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
6327
                    break;
6328
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6329
                gen_jmp_im(pc_start - s->cs_base);
6330
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6331
                tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
6332
            }
6333
            break;
6334
        case 4: /* verr */
6335
        case 5: /* verw */
6336
            if (!s->pe || s->vm86)
6337
                goto illegal_op;
6338
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6339
            if (s->cc_op != CC_OP_DYNAMIC)
6340
                gen_op_set_cc_op(s->cc_op);
6341
            if (op == 4)
6342
                gen_op_verr();
6343
            else
6344
                gen_op_verw();
6345
            s->cc_op = CC_OP_EFLAGS;
6346
            break;
6347
        default:
6348
            goto illegal_op;
6349
        }
6350
        break;
6351
    case 0x101:
6352
        modrm = ldub_code(s->pc++);
6353
        mod = (modrm >> 6) & 3;
6354
        op = (modrm >> 3) & 7;
6355
        rm = modrm & 7;
6356
        switch(op) {
6357
        case 0: /* sgdt */
6358
            if (mod == 3)
6359
                goto illegal_op;
6360
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
6361
                break;
6362
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6363
            gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
6364
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
6365
            gen_add_A0_im(s, 2);
6366
            gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
6367
            if (!s->dflag)
6368
                gen_op_andl_T0_im(0xffffff);
6369
            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6370
            break;
6371
        case 1:
6372
            if (mod == 3) {
6373
                switch (rm) {
6374
                case 0: /* monitor */
6375
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6376
                        s->cpl != 0)
6377
                        goto illegal_op;
6378
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
6379
                        break;
6380
                    gen_jmp_im(pc_start - s->cs_base);
6381
#ifdef TARGET_X86_64
6382
                    if (s->aflag == 2) {
6383
                        gen_op_movq_A0_reg(R_EAX);
6384
                    } else
6385
#endif
6386
                    {
6387
                        gen_op_movl_A0_reg(R_EAX);
6388
                        if (s->aflag == 0)
6389
                            gen_op_andl_A0_ffff();
6390
                    }
6391
                    gen_add_A0_ds_seg(s);
6392
                    tcg_gen_helper_0_1(helper_monitor, cpu_A0);
6393
                    break;
6394
                case 1: /* mwait */
6395
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6396
                        s->cpl != 0)
6397
                        goto illegal_op;
6398
                    if (s->cc_op != CC_OP_DYNAMIC) {
6399
                        gen_op_set_cc_op(s->cc_op);
6400
                        s->cc_op = CC_OP_DYNAMIC;
6401
                    }
6402
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
6403
                        break;
6404
                    gen_jmp_im(s->pc - s->cs_base);
6405
                    tcg_gen_helper_0_0(helper_mwait);
6406
                    gen_eob(s);
6407
                    break;
6408
                default:
6409
                    goto illegal_op;
6410
                }
6411
            } else { /* sidt */
6412
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
6413
                    break;
6414
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6415
                gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
6416
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
6417
                gen_add_A0_im(s, 2);
6418
                gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
6419
                if (!s->dflag)
6420
                    gen_op_andl_T0_im(0xffffff);
6421
                gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6422
            }
6423
            break;
6424
        case 2: /* lgdt */
6425
        case 3: /* lidt */
6426
            if (mod == 3) {
6427
                switch(rm) {
6428
                case 0: /* VMRUN */
6429
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
6430
                        break;
6431
                    if (s->cc_op != CC_OP_DYNAMIC)
6432
                        gen_op_set_cc_op(s->cc_op);
6433
                    gen_jmp_im(s->pc - s->cs_base);
6434
                    tcg_gen_helper_0_0(helper_vmrun);
6435
                    s->cc_op = CC_OP_EFLAGS;
6436
                    gen_eob(s);
6437
                    break;
6438
                case 1: /* VMMCALL */
6439
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
6440
                         break;
6441
                    /* FIXME: cause #UD if hflags & SVM */
6442
                    tcg_gen_helper_0_0(helper_vmmcall);
6443
                    break;
6444
                case 2: /* VMLOAD */
6445
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
6446
                         break;
6447
                    tcg_gen_helper_0_0(helper_vmload);
6448
                    break;
6449
                case 3: /* VMSAVE */
6450
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
6451
                         break;
6452
                    tcg_gen_helper_0_0(helper_vmsave);
6453
                    break;
6454
                case 4: /* STGI */
6455
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
6456
                         break;
6457
                    tcg_gen_helper_0_0(helper_stgi);
6458
                    break;
6459
                case 5: /* CLGI */
6460
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
6461
                         break;
6462
                    tcg_gen_helper_0_0(helper_clgi);
6463
                    break;
6464
                case 6: /* SKINIT */
6465
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
6466
                         break;
6467
                    tcg_gen_helper_0_0(helper_skinit);
6468
                    break;
6469
                case 7: /* INVLPGA */
6470
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
6471
                         break;
6472
                    tcg_gen_helper_0_0(helper_invlpga);
6473
                    break;
6474
                default:
6475
                    goto illegal_op;
6476
                }
6477
            } else if (s->cpl != 0) {
6478
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6479
            } else {
6480
                if (gen_svm_check_intercept(s, pc_start,
6481
                                            op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
6482
                    break;
6483
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6484
                gen_op_ld_T1_A0(OT_WORD + s->mem_index);
6485
                gen_add_A0_im(s, 2);
6486
                gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6487
                if (!s->dflag)
6488
                    gen_op_andl_T0_im(0xffffff);
6489
                if (op == 2) {
6490
                    gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
6491
                    gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
6492
                } else {
6493
                    gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
6494
                    gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
6495
                }
6496
            }
6497
            break;
6498
        case 4: /* smsw */
6499
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
6500
                break;
6501
            gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
6502
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6503
            break;
6504
        case 6: /* lmsw */
6505
            if (s->cpl != 0) {
6506
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6507
            } else {
6508
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
6509
                    break;
6510
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6511
                tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
6512
                gen_jmp_im(s->pc - s->cs_base);
6513
                gen_eob(s);
6514
            }
6515
            break;
6516
        case 7: /* invlpg */
6517
            if (s->cpl != 0) {
6518
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6519
            } else {
6520
                if (mod == 3) {
6521
#ifdef TARGET_X86_64
6522
                    if (CODE64(s) && rm == 0) {
6523
                        /* swapgs */
6524
                        gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
6525
                        gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
6526
                        gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
6527
                        gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
6528
                    } else
6529
#endif
6530
                    {
6531
                        goto illegal_op;
6532
                    }
6533
                } else {
6534
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
6535
                        break;
6536
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6537
                    tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
6538
                    gen_jmp_im(s->pc - s->cs_base);
6539
                    gen_eob(s);
6540
                }
6541
            }
6542
            break;
6543
        default:
6544
            goto illegal_op;
6545
        }
6546
        break;
6547
    case 0x108: /* invd */
6548
    case 0x109: /* wbinvd */
6549
        if (s->cpl != 0) {
6550
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6551
        } else {
6552
            if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6553
                break;
6554
            /* nothing to do */
6555
        }
6556
        break;
6557
    case 0x63: /* arpl or movslS (x86_64) */
6558
#ifdef TARGET_X86_64
6559
        if (CODE64(s)) {
6560
            int d_ot;
6561
            /* d_ot is the size of destination */
6562
            d_ot = dflag + OT_WORD;
6563

    
6564
            modrm = ldub_code(s->pc++);
6565
            reg = ((modrm >> 3) & 7) | rex_r;
6566
            mod = (modrm >> 6) & 3;
6567
            rm = (modrm & 7) | REX_B(s);
6568

    
6569
            if (mod == 3) {
6570
                gen_op_mov_TN_reg(OT_LONG, 0, rm);
6571
                /* sign extend */
6572
                if (d_ot == OT_QUAD)
6573
                    tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
6574
                gen_op_mov_reg_T0(d_ot, reg);
6575
            } else {
6576
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6577
                if (d_ot == OT_QUAD) {
6578
                    gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6579
                } else {
6580
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6581
                }
6582
                gen_op_mov_reg_T0(d_ot, reg);
6583
            }
6584
        } else
6585
#endif
6586
        {
6587
            if (!s->pe || s->vm86)
6588
                goto illegal_op;
6589
            ot = dflag ? OT_LONG : OT_WORD;
6590
            modrm = ldub_code(s->pc++);
6591
            reg = (modrm >> 3) & 7;
6592
            mod = (modrm >> 6) & 3;
6593
            rm = modrm & 7;
6594
            if (mod != 3) {
6595
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6596
                gen_op_ld_T0_A0(ot + s->mem_index);
6597
            } else {
6598
                gen_op_mov_TN_reg(ot, 0, rm);
6599
            }
6600
            gen_op_mov_TN_reg(ot, 1, reg);
6601
            if (s->cc_op != CC_OP_DYNAMIC)
6602
                gen_op_set_cc_op(s->cc_op);
6603
            gen_op_arpl();
6604
            s->cc_op = CC_OP_EFLAGS;
6605
            if (mod != 3) {
6606
                gen_op_st_T0_A0(ot + s->mem_index);
6607
            } else {
6608
                gen_op_mov_reg_T0(ot, rm);
6609
            }
6610
            gen_op_arpl_update();
6611
        }
6612
        break;
6613
    case 0x102: /* lar */
6614
    case 0x103: /* lsl */
6615
        if (!s->pe || s->vm86)
6616
            goto illegal_op;
6617
        ot = dflag ? OT_LONG : OT_WORD;
6618
        modrm = ldub_code(s->pc++);
6619
        reg = ((modrm >> 3) & 7) | rex_r;
6620
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6621
        gen_op_mov_TN_reg(ot, 1, reg);
6622
        if (s->cc_op != CC_OP_DYNAMIC)
6623
            gen_op_set_cc_op(s->cc_op);
6624
        if (b == 0x102)
6625
            gen_op_lar();
6626
        else
6627
            gen_op_lsl();
6628
        s->cc_op = CC_OP_EFLAGS;
6629
        gen_op_mov_reg_T1(ot, reg);
6630
        break;
6631
    case 0x118:
6632
        modrm = ldub_code(s->pc++);
6633
        mod = (modrm >> 6) & 3;
6634
        op = (modrm >> 3) & 7;
6635
        switch(op) {
6636
        case 0: /* prefetchnta */
6637
        case 1: /* prefetchnt0 */
6638
        case 2: /* prefetchnt0 */
6639
        case 3: /* prefetchnt0 */
6640
            if (mod == 3)
6641
                goto illegal_op;
6642
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6643
            /* nothing more to do */
6644
            break;
6645
        default: /* nop (multi byte) */
6646
            gen_nop_modrm(s, modrm);
6647
            break;
6648
        }
6649
        break;
6650
    case 0x119 ... 0x11f: /* nop (multi byte) */
6651
        modrm = ldub_code(s->pc++);
6652
        gen_nop_modrm(s, modrm);
6653
        break;
6654
    case 0x120: /* mov reg, crN */
6655
    case 0x122: /* mov crN, reg */
6656
        if (s->cpl != 0) {
6657
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6658
        } else {
6659
            modrm = ldub_code(s->pc++);
6660
            if ((modrm & 0xc0) != 0xc0)
6661
                goto illegal_op;
6662
            rm = (modrm & 7) | REX_B(s);
6663
            reg = ((modrm >> 3) & 7) | rex_r;
6664
            if (CODE64(s))
6665
                ot = OT_QUAD;
6666
            else
6667
                ot = OT_LONG;
6668
            switch(reg) {
6669
            case 0:
6670
            case 2:
6671
            case 3:
6672
            case 4:
6673
            case 8:
6674
                if (b & 2) {
6675
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6676
                    gen_op_mov_TN_reg(ot, 0, rm);
6677
                    tcg_gen_helper_0_2(helper_movl_crN_T0, 
6678
                                       tcg_const_i32(reg), cpu_T[0]);
6679
                    gen_jmp_im(s->pc - s->cs_base);
6680
                    gen_eob(s);
6681
                } else {
6682
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6683
#if !defined(CONFIG_USER_ONLY)
6684
                    if (reg == 8)
6685
                        tcg_gen_helper_1_0(helper_movtl_T0_cr8, cpu_T[0]);
6686
                    else
6687
#endif
6688
                        gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6689
                    gen_op_mov_reg_T0(ot, rm);
6690
                }
6691
                break;
6692
            default:
6693
                goto illegal_op;
6694
            }
6695
        }
6696
        break;
6697
    case 0x121: /* mov reg, drN */
6698
    case 0x123: /* mov drN, reg */
6699
        if (s->cpl != 0) {
6700
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6701
        } else {
6702
            modrm = ldub_code(s->pc++);
6703
            if ((modrm & 0xc0) != 0xc0)
6704
                goto illegal_op;
6705
            rm = (modrm & 7) | REX_B(s);
6706
            reg = ((modrm >> 3) & 7) | rex_r;
6707
            if (CODE64(s))
6708
                ot = OT_QUAD;
6709
            else
6710
                ot = OT_LONG;
6711
            /* XXX: do it dynamically with CR4.DE bit */
6712
            if (reg == 4 || reg == 5 || reg >= 8)
6713
                goto illegal_op;
6714
            if (b & 2) {
6715
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6716
                gen_op_mov_TN_reg(ot, 0, rm);
6717
                tcg_gen_helper_0_2(helper_movl_drN_T0,
6718
                                   tcg_const_i32(reg), cpu_T[0]);
6719
                gen_jmp_im(s->pc - s->cs_base);
6720
                gen_eob(s);
6721
            } else {
6722
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6723
                gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6724
                gen_op_mov_reg_T0(ot, rm);
6725
            }
6726
        }
6727
        break;
6728
    case 0x106: /* clts */
6729
        if (s->cpl != 0) {
6730
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6731
        } else {
6732
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6733
            tcg_gen_helper_0_0(helper_clts);
6734
            /* abort block because static cpu state changed */
6735
            gen_jmp_im(s->pc - s->cs_base);
6736
            gen_eob(s);
6737
        }
6738
        break;
6739
    /* MMX/3DNow!/SSE/SSE2/SSE3 support */
6740
    case 0x1c3: /* MOVNTI reg, mem */
6741
        if (!(s->cpuid_features & CPUID_SSE2))
6742
            goto illegal_op;
6743
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6744
        modrm = ldub_code(s->pc++);
6745
        mod = (modrm >> 6) & 3;
6746
        if (mod == 3)
6747
            goto illegal_op;
6748
        reg = ((modrm >> 3) & 7) | rex_r;
6749
        /* generate a generic store */
6750
        gen_ldst_modrm(s, modrm, ot, reg, 1);
6751
        break;
6752
    case 0x1ae:
6753
        modrm = ldub_code(s->pc++);
6754
        mod = (modrm >> 6) & 3;
6755
        op = (modrm >> 3) & 7;
6756
        switch(op) {
6757
        case 0: /* fxsave */
6758
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6759
                (s->flags & HF_EM_MASK))
6760
                goto illegal_op;
6761
            if (s->flags & HF_TS_MASK) {
6762
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6763
                break;
6764
            }
6765
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6766
            if (s->cc_op != CC_OP_DYNAMIC)
6767
                gen_op_set_cc_op(s->cc_op);
6768
            gen_jmp_im(pc_start - s->cs_base);
6769
            tcg_gen_helper_0_2(helper_fxsave, 
6770
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
6771
            break;
6772
        case 1: /* fxrstor */
6773
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6774
                (s->flags & HF_EM_MASK))
6775
                goto illegal_op;
6776
            if (s->flags & HF_TS_MASK) {
6777
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6778
                break;
6779
            }
6780
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6781
            if (s->cc_op != CC_OP_DYNAMIC)
6782
                gen_op_set_cc_op(s->cc_op);
6783
            gen_jmp_im(pc_start - s->cs_base);
6784
            tcg_gen_helper_0_2(helper_fxrstor,
6785
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
6786
            break;
6787
        case 2: /* ldmxcsr */
6788
        case 3: /* stmxcsr */
6789
            if (s->flags & HF_TS_MASK) {
6790
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6791
                break;
6792
            }
6793
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6794
                mod == 3)
6795
                goto illegal_op;
6796
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6797
            if (op == 2) {
6798
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6799
                gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6800
            } else {
6801
                gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6802
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
6803
            }
6804
            break;
6805
        case 5: /* lfence */
6806
        case 6: /* mfence */
6807
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6808
                goto illegal_op;
6809
            break;
6810
        case 7: /* sfence / clflush */
6811
            if ((modrm & 0xc7) == 0xc0) {
6812
                /* sfence */
6813
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
6814
                if (!(s->cpuid_features & CPUID_SSE))
6815
                    goto illegal_op;
6816
            } else {
6817
                /* clflush */
6818
                if (!(s->cpuid_features & CPUID_CLFLUSH))
6819
                    goto illegal_op;
6820
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6821
            }
6822
            break;
6823
        default:
6824
            goto illegal_op;
6825
        }
6826
        break;
6827
    case 0x10d: /* 3DNow! prefetch(w) */
6828
        modrm = ldub_code(s->pc++);
6829
        mod = (modrm >> 6) & 3;
6830
        if (mod == 3)
6831
            goto illegal_op;
6832
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6833
        /* ignore for now */
6834
        break;
6835
    case 0x1aa: /* rsm */
6836
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
6837
            break;
6838
        if (!(s->flags & HF_SMM_MASK))
6839
            goto illegal_op;
6840
        if (s->cc_op != CC_OP_DYNAMIC) {
6841
            gen_op_set_cc_op(s->cc_op);
6842
            s->cc_op = CC_OP_DYNAMIC;
6843
        }
6844
        gen_jmp_im(s->pc - s->cs_base);
6845
        tcg_gen_helper_0_0(helper_rsm);
6846
        gen_eob(s);
6847
        break;
6848
    case 0x10e ... 0x10f:
6849
        /* 3DNow! instructions, ignore prefixes */
6850
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
6851
    case 0x110 ... 0x117:
6852
    case 0x128 ... 0x12f:
6853
    case 0x150 ... 0x177:
6854
    case 0x17c ... 0x17f:
6855
    case 0x1c2:
6856
    case 0x1c4 ... 0x1c6:
6857
    case 0x1d0 ... 0x1fe:
6858
        gen_sse(s, b, pc_start, rex_r);
6859
        break;
6860
    default:
6861
        goto illegal_op;
6862
    }
6863
    /* lock generation */
6864
    if (s->prefix & PREFIX_LOCK)
6865
        tcg_gen_helper_0_0(helper_unlock);
6866
    return s->pc;
6867
 illegal_op:
6868
    if (s->prefix & PREFIX_LOCK)
6869
        tcg_gen_helper_0_0(helper_unlock);
6870
    /* XXX: ensure that no lock was generated */
6871
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6872
    return s->pc;
6873
}
/* Callback used by TCG to expand a macro op back into real ops; it is
   registered on tcg_ctx in optimize_flags_init() below.  Only the
   compiled-in test macro is handled, so when MACRO_TEST is not defined
   (the default, see the top of this file) this is effectively a no-op. */
static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
{
    switch(macro_id) {
#ifdef MACRO_TEST
    case MACRO_TEST:
        /* test expansion: call the EAX division helper on T0 */
        tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
        break;
#endif
    }
}
/* One-time setup of the global TCG values used by the x86 translator:
   the env pointer, the T0/T1/A0 pseudo registers, T3, and the
   condition-code state cells (cc_op/cc_src/cc_dst). */
void optimize_flags_init(void)
{
    /* sanity check: CCTable must have the expected power-of-two size
       (presumably relied on for shift-based indexing elsewhere —
       NOTE(review): confirm against the CCTable users) */
#if TCG_TARGET_REG_BITS == 32
    assert(sizeof(CCTable) == (1 << 3));
#else
    assert(sizeof(CCTable) == (1 << 4));
#endif
    /* let TCG expand macro ops through our callback above */
    tcg_set_macro_func(&tcg_ctx, tcg_macro_func);

    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
#if TARGET_LONG_BITS > HOST_LONG_BITS
    /* target words do not fit in host registers: back T0/T1/A0 with
       CPUState memory slots instead of fixed host registers */
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL, 
                                  TCG_AREG0, offsetof(CPUState, t0), "T0");
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
                                  TCG_AREG0, offsetof(CPUState, t1), "T1");
    cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
                                TCG_AREG0, offsetof(CPUState, t2), "A0");
#else
    /* target words fit: map T0/T1/A0 onto fixed host registers */
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
    cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
#endif
    /* T3 always lives in a CPUState slot */
    cpu_T3 = tcg_global_mem_new(TCG_TYPE_TL,
                                TCG_AREG0, offsetof(CPUState, t3), "T3");
#if defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS)
    /* XXX: must be suppressed once there are less fixed registers */
    cpu_tmp1_i64 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");
#endif
    /* lazily-evaluated condition-code state, kept in CPUState */
    cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
                                   TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
    cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
    cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
}
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction (used by gen_pc_load()
   below to restore state at a fault point).  Always returns 0. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int j, lj, cflags;
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* decode the static CPU state bits cached in tb->flags into the
       per-block disassembly context */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions (index depends on softmmu and CPL) */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    /* direct block chaining is only allowed when no per-insn stop
       condition applies */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    /* allocate the TCG temporaries used throughout translation */
    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
#if !(defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS))
    cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
#endif
    cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
    cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
    cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;

    /* main translation loop: one guest instruction per iteration */
    for(;;) {
        /* emit a debug exception if a breakpoint is set on this insn */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* pad the per-op tables up to the current op index, then
               record the PC and cc_op state for this instruction */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* fill the remaining entries so every generated op has a mapping */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP_OPT) {
            fprintf(logfile, "OP before opt:\n");
            tcg_dump_ops(&tcg_ctx, logfile);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* the size is only meaningful for a normal translation pass */
    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
/* Translate basic block 'tb' without recording per-instruction PC
   information.  Always returns 0. */
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    const int search_pc = 0;

    return gen_intermediate_code_internal(env, tb, search_pc);
}
/* Translate basic block 'tb' and additionally record PC information
   for each intermediate instruction.  Always returns 0. */
int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    const int search_pc = 1;

    return gen_intermediate_code_internal(env, tb, search_pc);
}
/* Restore the CPU state corresponding to the generated op at index
   'pc_pos': the guest eip, and cc_op when it was statically known at
   that point (CC_OP_DYNAMIC entries leave env->cc_op untouched). */
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    int restored_cc_op;

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP) {
        int idx;

        /* dump the op-index -> PC mapping up to the restore point */
        fprintf(logfile, "RESTORE:\n");
        for (idx = 0; idx <= pc_pos; idx++) {
            if (!gen_opc_instr_start[idx])
                continue;
            fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", idx, gen_opc_pc[idx]);
        }
        fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                (uint32_t)tb->cs_base);
    }
#endif
    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
    restored_cc_op = gen_opc_cc_op[pc_pos];
    if (restored_cc_op != CC_OP_DYNAMIC)
        env->cc_op = restored_cc_op;
}