Statistics
| Branch: | Revision:

root / target-i386 / translate.c @ b6abf97d

History | View | Annotate | Download (219.7 kB)

1
/*
2
 *  i386 translation
3
 *
4
 *  Copyright (c) 2003 Fabrice Bellard
5
 *
6
 * This library is free software; you can redistribute it and/or
7
 * modify it under the terms of the GNU Lesser General Public
8
 * License as published by the Free Software Foundation; either
9
 * version 2 of the License, or (at your option) any later version.
10
 *
11
 * This library is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14
 * Lesser General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU Lesser General Public
17
 * License along with this library; if not, write to the Free Software
18
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
 */
20
#include <stdarg.h>
21
#include <stdlib.h>
22
#include <stdio.h>
23
#include <string.h>
24
#include <inttypes.h>
25
#include <signal.h>
26
#include <assert.h>
27

    
28
#include "cpu.h"
29
#include "exec-all.h"
30
#include "disas.h"
31
#include "helper.h"
32
#include "tcg-op.h"
33

    
34
#define PREFIX_REPZ   0x01
35
#define PREFIX_REPNZ  0x02
36
#define PREFIX_LOCK   0x04
37
#define PREFIX_DATA   0x08
38
#define PREFIX_ADR    0x10
39

    
40
#ifdef TARGET_X86_64
41
#define X86_64_ONLY(x) x
42
#define X86_64_DEF(x...) x
43
#define CODE64(s) ((s)->code64)
44
#define REX_X(s) ((s)->rex_x)
45
#define REX_B(s) ((s)->rex_b)
46
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
47
#if 1
48
#define BUGGY_64(x) NULL
49
#endif
50
#else
51
#define X86_64_ONLY(x) NULL
52
#define X86_64_DEF(x...)
53
#define CODE64(s) 0
54
#define REX_X(s) 0
55
#define REX_B(s) 0
56
#endif
57

    
58
//#define MACRO_TEST   1
59

    
60
/* global register indexes */
61
static TCGv cpu_env, cpu_T[2], cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst;
62
static TCGv cpu_T3;
63
/* local register indexes (only used inside old micro ops) */
64
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
65
static TCGv cpu_tmp5, cpu_tmp6;
66

    
67
#ifdef TARGET_X86_64
68
static int x86_64_hregs;
69
#endif
70

    
71
/* Per-instruction translation state: fields decoded from the current
   instruction's prefixes plus invariants that hold for the whole
   translation block. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;   /* PREFIX_* bits collected while decoding */
    int aflag, dflag; /* address/operand size: 0 = 16 bit, non-zero = 32 bit,
                         2 = 64 bit (see the aflag tests in the string ops) */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* REX prefix index/base extension bits (via REX_X/REX_B) */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;    /* current privilege level (compared against iopl in gen_check_io) */
    int iopl;   /* I/O privilege level from EFLAGS */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;      /* cached CPUID feature bits for decode-time checks */
    int cpuid_ext_features;
    int cpuid_ext2_features;
} DisasContext;
107

    
108
static void gen_eob(DisasContext *s);
109
static void gen_jmp(DisasContext *s, target_ulong eip);
110
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
111

    
112
/* i386 arith/logic operations */
113
enum {
114
    OP_ADDL,
115
    OP_ORL,
116
    OP_ADCL,
117
    OP_SBBL,
118
    OP_ANDL,
119
    OP_SUBL,
120
    OP_XORL,
121
    OP_CMPL,
122
};
123

    
124
/* i386 shift ops */
125
enum {
126
    OP_ROL,
127
    OP_ROR,
128
    OP_RCL,
129
    OP_RCR,
130
    OP_SHL,
131
    OP_SHR,
132
    OP_SHL1, /* undocumented */
133
    OP_SAR = 7,
134
};
135

    
136
/* operand size */
137
enum {
138
    OT_BYTE = 0,
139
    OT_WORD,
140
    OT_LONG,
141
    OT_QUAD,
142
};
143

    
144
enum {
145
    /* I386 int registers */
146
    OR_EAX,   /* MUST be even numbered */
147
    OR_ECX,
148
    OR_EDX,
149
    OR_EBX,
150
    OR_ESP,
151
    OR_EBP,
152
    OR_ESI,
153
    OR_EDI,
154

    
155
    OR_TMP0 = 16,    /* temporary operand register */
156
    OR_TMP1,
157
    OR_A0, /* temporary register used when doing address evaluation */
158
};
159

    
160
/* T0 = 0 */
static inline void gen_op_movl_T0_0(void)
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}
164

    
165
/* T0 = val (sign-extended 32-bit immediate) */
static inline void gen_op_movl_T0_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}
169

    
170
/* T0 = val (zero-extended 32-bit immediate) */
static inline void gen_op_movl_T0_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}
174

    
175
/* T1 = val (sign-extended 32-bit immediate) */
static inline void gen_op_movl_T1_im(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
179

    
180
/* T1 = val (zero-extended 32-bit immediate) */
static inline void gen_op_movl_T1_imu(uint32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
184

    
185
/* A0 = val (32-bit address immediate) */
static inline void gen_op_movl_A0_im(uint32_t val)
{
    tcg_gen_movi_tl(cpu_A0, val);
}
189

    
190
#ifdef TARGET_X86_64
191
static inline void gen_op_movq_A0_im(int64_t val)
192
{
193
    tcg_gen_movi_tl(cpu_A0, val);
194
}
195
#endif
196

    
197
/* T0 = val (full target_ulong immediate) */
static inline void gen_movtl_T0_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[0], val);
}
201

    
202
/* T1 = val (full target_ulong immediate) */
static inline void gen_movtl_T1_im(target_ulong val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
206

    
207
/* T0 &= 0xffff (truncate to 16 bits) */
static inline void gen_op_andl_T0_ffff(void)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}
211

    
212
/* T0 &= val */
static inline void gen_op_andl_T0_im(uint32_t val)
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}
216

    
217
/* T0 = T1 */
static inline void gen_op_movl_T0_T1(void)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}
221

    
222
/* A0 &= 0xffff (16-bit address wrap) */
static inline void gen_op_andl_A0_ffff(void)
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
226

    
227
#ifdef TARGET_X86_64
228

    
229
#define NB_OP_SIZES 4
230

    
231
#define DEF_REGS(prefix, suffix) \
232
  prefix ## EAX ## suffix,\
233
  prefix ## ECX ## suffix,\
234
  prefix ## EDX ## suffix,\
235
  prefix ## EBX ## suffix,\
236
  prefix ## ESP ## suffix,\
237
  prefix ## EBP ## suffix,\
238
  prefix ## ESI ## suffix,\
239
  prefix ## EDI ## suffix,\
240
  prefix ## R8 ## suffix,\
241
  prefix ## R9 ## suffix,\
242
  prefix ## R10 ## suffix,\
243
  prefix ## R11 ## suffix,\
244
  prefix ## R12 ## suffix,\
245
  prefix ## R13 ## suffix,\
246
  prefix ## R14 ## suffix,\
247
  prefix ## R15 ## suffix,
248

    
249
#else /* !TARGET_X86_64 */
250

    
251
#define NB_OP_SIZES 3
252

    
253
#define DEF_REGS(prefix, suffix) \
254
  prefix ## EAX ## suffix,\
255
  prefix ## ECX ## suffix,\
256
  prefix ## EDX ## suffix,\
257
  prefix ## EBX ## suffix,\
258
  prefix ## ESP ## suffix,\
259
  prefix ## EBP ## suffix,\
260
  prefix ## ESI ## suffix,\
261
  prefix ## EDI ## suffix,
262

    
263
#endif /* !TARGET_X86_64 */
264

    
265
#if defined(WORDS_BIGENDIAN)
266
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
267
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
268
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
269
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
270
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
271
#else
272
#define REG_B_OFFSET 0
273
#define REG_H_OFFSET 1
274
#define REG_W_OFFSET 0
275
#define REG_L_OFFSET 0
276
#define REG_LH_OFFSET 4
277
#endif
278

    
279
/* Store cpu_T[t_index] into guest register 'reg' with operand size 'ot'.
   For OT_BYTE, reg 4..7 without a REX prefix designate the legacy high
   byte registers (AH/CH/DH/BH), i.e. byte REG_H_OFFSET of regs[reg - 4].
   On x86_64, an OT_LONG store also clears the high 32 bits, matching the
   hardware zero-extension of 32-bit writes. */
static inline void gen_op_mov_reg_TN(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            /* AH, CH, DH, BH: high byte of the corresponding low register */
            tcg_gen_st8_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
311

    
312
/* Store T0 into guest register 'reg' with operand size 'ot'. */
static inline void gen_op_mov_reg_T0(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 0, reg);
}
316

    
317
/* Store T1 into guest register 'reg' with operand size 'ot'. */
static inline void gen_op_mov_reg_T1(int ot, int reg)
{
    gen_op_mov_reg_TN(ot, 1, reg);
}
321

    
322
/* Store A0 into guest register 'reg'. 'size' is one smaller than the
   OT_* encoding: 0 = word, 1 = long, 2 = quad. As elsewhere, a 32-bit
   store on x86_64 also zeroes the high half of the register. */
static inline void gen_op_mov_reg_A0(int size, int reg)
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
347

    
348
/* Load guest register 'reg' into cpu_T[t_index]. Only the high byte
   registers (AH/CH/DH/BH) need special handling; every other case can
   load the whole register word since callers truncate as needed. */
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            /* AH, CH, DH, BH: zero-extended load of the high byte */
            tcg_gen_ld8u_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(cpu_T[t_index], cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}
364

    
365
/* A0 = low 32 bits of guest register 'reg' (zero-extended). */
static inline void gen_op_movl_A0_reg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}
369

    
370
/* A0 += val with 32-bit wraparound: on x86_64 the result is masked back
   to 32 bits so address arithmetic matches 32-bit mode semantics. */
static inline void gen_op_addl_A0_im(int32_t val)
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
377

    
378
#ifdef TARGET_X86_64
379
static inline void gen_op_addq_A0_im(int64_t val)
380
{
381
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
382
}
383
#endif
384
    
385
static void gen_add_A0_im(DisasContext *s, int val)
386
{
387
#ifdef TARGET_X86_64
388
    if (CODE64(s))
389
        gen_op_addq_A0_im(val);
390
    else
391
#endif
392
        gen_op_addl_A0_im(val);
393
}
394

    
395
/* T0 += T1 */
static inline void gen_op_addl_T0_T1(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}
399

    
400
/* EIP = T0 (store T0 into the architectural instruction pointer). */
static inline void gen_op_jmp_T0(void)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}
404

    
405
/* SP += val: only the low 16 bits of ESP are written back, leaving the
   upper bits untouched (16-bit stack segment semantics). */
static inline void gen_op_addw_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
    tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]) + REG_W_OFFSET);
}
411

    
412
/* ESP += val with 32-bit wraparound on x86_64 (masked to 32 bits before
   the store so the high half stays zero). */
static inline void gen_op_addl_ESP_im(int32_t val)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
}
421

    
422
#ifdef TARGET_X86_64
423
static inline void gen_op_addq_ESP_im(int32_t val)
424
{
425
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
426
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
427
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ESP]));
428
}
429
#endif
430

    
431
/* Record the pending condition-code operation in cpu_cc_op. */
static inline void gen_op_set_cc_op(int32_t val)
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}
435

    
436
/* A0 += regs[reg] << shift, masked back to 32 bits on x86_64
   (32-bit effective-address computation with a scaled index). */
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
446

    
447
/* A0 = low 32 bits of segment base 'reg'. */
static inline void gen_op_movl_A0_seg(int reg)
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}
451

    
452
/* A0 += segment base of 'reg', masked to 32 bits on x86_64. */
static inline void gen_op_addl_A0_seg(int reg)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
460

    
461
#ifdef TARGET_X86_64
462
static inline void gen_op_movq_A0_seg(int reg)
463
{
464
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
465
}
466

    
467
static inline void gen_op_addq_A0_seg(int reg)
468
{
469
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
470
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
471
}
472

    
473
static inline void gen_op_movq_A0_reg(int reg)
474
{
475
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
476
}
477

    
478
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
479
{
480
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
481
    if (shift != 0) 
482
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
483
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
484
}
485
#endif
486

    
487
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
488
    [0] = {
489
        DEF_REGS(gen_op_cmovw_, _T1_T0)
490
    },
491
    [1] = {
492
        DEF_REGS(gen_op_cmovl_, _T1_T0)
493
    },
494
#ifdef TARGET_X86_64
495
    [2] = {
496
        DEF_REGS(gen_op_cmovq_, _T1_T0)
497
    },
498
#endif
499
};
500

    
501
#define DEF_ARITHC(SUFFIX)\
502
    {\
503
        gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
504
        gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
505
    },\
506
    {\
507
        gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
508
        gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
509
    },\
510
    {\
511
        gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
512
        gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
513
    },\
514
    {\
515
        X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
516
        X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
517
    },
518

    
519
static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
520
    DEF_ARITHC( )
521
};
522

    
523
static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
524
    DEF_ARITHC(_raw)
525
#ifndef CONFIG_USER_ONLY
526
    DEF_ARITHC(_kernel)
527
    DEF_ARITHC(_user)
528
#endif
529
};
530

    
531
/* CC_OP_* value (byte variant) for each of the 8 arith/logic group ops;
   indexed by OP_ADDL..OP_CMPL from the enum above. Callers presumably
   add the operand-size offset to select the w/l/q variant — verify at
   the use sites. */
static const int cc_op_arithb[8] = {
    CC_OP_ADDB,   /* OP_ADDL */
    CC_OP_LOGICB, /* OP_ORL */
    CC_OP_ADDB,   /* OP_ADCL */
    CC_OP_SUBB,   /* OP_SBBL */
    CC_OP_LOGICB, /* OP_ANDL */
    CC_OP_SUBB,   /* OP_SUBL */
    CC_OP_LOGICB, /* OP_XORL */
    CC_OP_SUBB,   /* OP_CMPL */
};
541

    
542
#define DEF_CMPXCHG(SUFFIX)\
543
    gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
544
    gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
545
    gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
546
    X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
547

    
548
static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
549
    DEF_CMPXCHG( )
550
};
551

    
552
static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
553
    DEF_CMPXCHG(_raw)
554
#ifndef CONFIG_USER_ONLY
555
    DEF_CMPXCHG(_kernel)
556
    DEF_CMPXCHG(_user)
557
#endif
558
};
559

    
560
static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
561
    [0] = {
562
        gen_op_btw_T0_T1_cc,
563
        gen_op_btsw_T0_T1_cc,
564
        gen_op_btrw_T0_T1_cc,
565
        gen_op_btcw_T0_T1_cc,
566
    },
567
    [1] = {
568
        gen_op_btl_T0_T1_cc,
569
        gen_op_btsl_T0_T1_cc,
570
        gen_op_btrl_T0_T1_cc,
571
        gen_op_btcl_T0_T1_cc,
572
    },
573
#ifdef TARGET_X86_64
574
    [2] = {
575
        gen_op_btq_T0_T1_cc,
576
        gen_op_btsq_T0_T1_cc,
577
        gen_op_btrq_T0_T1_cc,
578
        gen_op_btcq_T0_T1_cc,
579
    },
580
#endif
581
};
582

    
583
static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
584
    gen_op_add_bitw_A0_T1,
585
    gen_op_add_bitl_A0_T1,
586
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
587
};
588

    
589
static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
590
    [0] = {
591
        gen_op_bsfw_T0_cc,
592
        gen_op_bsrw_T0_cc,
593
    },
594
    [1] = {
595
        gen_op_bsfl_T0_cc,
596
        gen_op_bsrl_T0_cc,
597
    },
598
#ifdef TARGET_X86_64
599
    [2] = {
600
        gen_op_bsfq_T0_cc,
601
        gen_op_bsrq_T0_cc,
602
    },
603
#endif
604
};
605

    
606
static inline void gen_op_lds_T0_A0(int idx)
607
{
608
    int mem_index = (idx >> 2) - 1;
609
    switch(idx & 3) {
610
    case 0:
611
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
612
        break;
613
    case 1:
614
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
615
        break;
616
    default:
617
    case 2:
618
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
619
        break;
620
    }
621
}
622

    
623
/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
/* Load a zero-extended value from [A0] into T0. 'idx & 3' is the size
   (0 = byte, 1 = word, 2 = long, 3 = quad); the rest is the mem index. */
static inline void gen_op_ld_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
643

    
644
/* Unsigned load alias: gen_op_ld_T0_A0 already zero-extends. */
static inline void gen_op_ldu_T0_A0(int idx)
{
    gen_op_ld_T0_A0(idx);
}
648

    
649
/* Load a zero-extended value from [A0] into T1 (same idx encoding as
   gen_op_ld_T0_A0). */
static inline void gen_op_ld_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}
668

    
669
/* Store T0 to [A0] (same idx encoding as gen_op_ld_T0_A0). */
static inline void gen_op_st_T0_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[0], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
688

    
689
/* Store T1 to [A0] (same idx encoding as gen_op_ld_T0_A0). */
static inline void gen_op_st_T1_A0(int idx)
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(cpu_T[1], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(cpu_T[1], cpu_A0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(cpu_T[1], cpu_A0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(cpu_T[1], cpu_A0, mem_index);
        break;
    }
}
708

    
709
/* Store the immediate 'pc' into the architectural EIP field — used to
   materialize the current instruction pointer before helpers that may
   raise exceptions. */
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
714

    
715
/* Compute the string-op source address: A0 = seg_base + (R)ESI, honoring
   the segment override prefix and the current address size. The source
   defaults to DS when no override is present. */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64 bit address: segment base only applied with an override */
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}
749

    
750
/* Compute the string-op destination address: A0 = ES base + (R)EDI.
   The destination segment is always ES and cannot be overridden. */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            /* flat segments: skip the base addition entirely */
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
770

    
771
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
772
    gen_op_movl_T0_Dshiftb,
773
    gen_op_movl_T0_Dshiftw,
774
    gen_op_movl_T0_Dshiftl,
775
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
776
};
777

    
778
static GenOpFunc1 *gen_op_jnz_ecx[3] = {
779
    gen_op_jnz_ecxw,
780
    gen_op_jnz_ecxl,
781
    X86_64_ONLY(gen_op_jnz_ecxq),
782
};
783

    
784
static GenOpFunc1 *gen_op_jz_ecx[3] = {
785
    gen_op_jz_ecxw,
786
    gen_op_jz_ecxl,
787
    X86_64_ONLY(gen_op_jz_ecxq),
788
};
789

    
790
static GenOpFunc *gen_op_dec_ECX[3] = {
791
    gen_op_decw_ECX,
792
    gen_op_decl_ECX,
793
    X86_64_ONLY(gen_op_decq_ECX),
794
};
795

    
796
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
797
    {
798
        gen_op_jnz_subb,
799
        gen_op_jnz_subw,
800
        gen_op_jnz_subl,
801
        X86_64_ONLY(gen_op_jnz_subq),
802
    },
803
    {
804
        gen_op_jz_subb,
805
        gen_op_jz_subw,
806
        gen_op_jz_subl,
807
        X86_64_ONLY(gen_op_jz_subq),
808
    },
809
};
810

    
811
static void *helper_in_func[3] = {
812
    helper_inb,
813
    helper_inw,
814
    helper_inl,
815
};
816

    
817
static void *helper_out_func[3] = {
818
    helper_outb,
819
    helper_outw,
820
    helper_outl,
821
};
822

    
823
static void *gen_check_io_func[3] = {
824
    helper_check_iob,
825
    helper_check_iow,
826
    helper_check_iol,
827
};
828

    
829
/* Emit the permission checks required before an IN/OUT (or INS/OUTS)
   of size 'ot'. Two independent checks may be generated:
     1. In protected mode with CPL > IOPL (or in vm86 mode), a call to
        helper_check_iob/w/l with the port number from T0; the helper
        presumably validates the TSS I/O permission bitmap — it can
        raise an exception, so cc_op and EIP are flushed first.
     2. If the SVM IOIO intercept is enabled in s->flags, a call to
        helper_svm_check_io with the port, the svm_flags (augmented
        with the access size) and the insn length (next_eip - cur_eip).
   'cur_eip' is the address of the I/O insn itself. */
static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
                           cpu_tmp2_i32);
    }
    if(s->flags & (1ULL << INTERCEPT_IOIO_PROT)) {
        /* avoid flushing cc_op/EIP twice if check 1 already did it */
        if (!state_saved) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(cur_eip);
            state_saved = 1;
        }
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           cpu_tmp2_i32,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));
    }
}
861

    
862
/* Emit one MOVS iteration: copy an 'ot'-sized element from [seg:(R)ESI]
   to [ES:(R)EDI], then advance both index registers by the direction
   delta (gen_op_movl_T0_Dshift loads +size or -size per DF). */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
883

    
884
/* Flush the statically-tracked condition-code op into cpu_cc_op and
   mark the translation-time state as dynamic, so subsequently emitted
   code reads the flags state from the CPU rather than assuming it. */
static inline void gen_update_cc_op(DisasContext *s)
{
    if (s->cc_op == CC_OP_DYNAMIC)
        return;
    gen_op_set_cc_op(s->cc_op);
    s->cc_op = CC_OP_DYNAMIC;
}
891

    
892
/* Set flags from a one-operand result: cc_dst = T0, cc_src unused
   (discarded so TCG can drop earlier computations of it). */
static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
897

    
898
/* Set flags from a two-operand result: cc_src = T1, cc_dst = T0. */
static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
903

    
904
/* CMP-style flag update: cc_src = T1, cc_dst = T0 - T1 (the subtraction
   result is computed only for the flags; T0/T1 are left untouched). */
static inline void gen_op_cmpl_T0_T1_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}
909

    
910
/* TEST-style flag update: cc_dst = T0 & T1, cc_src unused. */
static inline void gen_op_testl_T0_T1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}
915

    
916
/* NEG flag update: cc_src = -T0 (the original operand, recovered by
   negating the result), cc_dst = T0 (the negated result). */
static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
921

    
922
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
/* Emit the leading (E)CX == 0 test of a REP-prefixed string insn:
   fall through to the string body when (E)CX != 0, otherwise jump to
   next_eip. Returns label l2, which callers can branch to in order to
   exit the REP loop. */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
936

    
937
/* Emit one STOS iteration: store the 'ot'-sized low part of EAX to
   [ES:(R)EDI], then advance EDI by the direction delta. The full EAX is
   loaded; the sized store truncates as needed. */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
954

    
955
/* Emit one LODS iteration: load an 'ot'-sized element from
   [seg:(R)ESI] into EAX, then advance ESI by the direction delta. */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
972

    
973
/* Emit one SCAS iteration: compare EAX (T0) against the element at
   [ES:(R)EDI] (T1), setting flags as for CMP, then advance EDI. */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
991

    
992
/* Emit one CMPS iteration: compare [seg:(R)ESI] (T0) with
   [ES:(R)EDI] (T1), setting flags as for CMP, then advance both
   index registers by the direction delta. */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1014

    
1015
/* Emit one INS iteration: read an 'ot'-sized value from the I/O port in
   DX and store it to [ES:(R)EDI], then advance EDI. */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    /* Dummy zero store before the port read — presumably forces any
       memory fault to be raised before the device I/O side effect
       happens; verify against the fault-ordering requirement. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1037

    
1038
/* Emit one OUTS iteration: load an 'ot'-sized value from [seg:(R)ESI]
   and write it to the I/O port in DX, then advance ESI. */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    /* port number = low 16 bits of EDX */
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1061

    
1062
/* same method as Valgrind : we generate jumps to current or next
   instruction */
/* Define gen_repz_<op>: wrap one string-op body in a REP loop that
   tests (E)CX, runs the op, decrements (E)CX and jumps back to the
   insn itself (cur_eip) so each iteration is re-entered through the
   translator — keeping single-step semantics correct. */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
1079

    
1080
/* Like GEN_REPZ but for SCAS/CMPS, which also terminate on the ZF
   condition: 'nz' selects REPZ (stop when ZF clear) vs REPNZ (stop when
   ZF set) via the gen_op_string_jnz_sub table. */
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);\
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
GEN_REPZ(movs)
1099
GEN_REPZ(stos)
1100
GEN_REPZ(lods)
1101
GEN_REPZ(ins)
1102
GEN_REPZ(outs)
1103
GEN_REPZ2(scas)
1104
GEN_REPZ2(cmps)
1105

    
1106
/* Condition-code kinds, matching bits 3:1 of the Jcc/SETcc opcodes
   (bit 0 of the opcode selects the negated form). */
enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};
static GenOpFunc1 *gen_jcc_sub[4][8] = {
1118
    [OT_BYTE] = {
1119
        NULL,
1120
        gen_op_jb_subb,
1121
        gen_op_jz_subb,
1122
        gen_op_jbe_subb,
1123
        gen_op_js_subb,
1124
        NULL,
1125
        gen_op_jl_subb,
1126
        gen_op_jle_subb,
1127
    },
1128
    [OT_WORD] = {
1129
        NULL,
1130
        gen_op_jb_subw,
1131
        gen_op_jz_subw,
1132
        gen_op_jbe_subw,
1133
        gen_op_js_subw,
1134
        NULL,
1135
        gen_op_jl_subw,
1136
        gen_op_jle_subw,
1137
    },
1138
    [OT_LONG] = {
1139
        NULL,
1140
        gen_op_jb_subl,
1141
        gen_op_jz_subl,
1142
        gen_op_jbe_subl,
1143
        gen_op_js_subl,
1144
        NULL,
1145
        gen_op_jl_subl,
1146
        gen_op_jle_subl,
1147
    },
1148
#ifdef TARGET_X86_64
1149
    [OT_QUAD] = {
1150
        NULL,
1151
        BUGGY_64(gen_op_jb_subq),
1152
        gen_op_jz_subq,
1153
        BUGGY_64(gen_op_jbe_subq),
1154
        gen_op_js_subq,
1155
        NULL,
1156
        BUGGY_64(gen_op_jl_subq),
1157
        BUGGY_64(gen_op_jle_subq),
1158
    },
1159
#endif
1160
};
1161
static GenOpFunc1 *gen_op_loop[3][4] = {
1162
    [0] = {
1163
        gen_op_loopnzw,
1164
        gen_op_loopzw,
1165
        gen_op_jnz_ecxw,
1166
    },
1167
    [1] = {
1168
        gen_op_loopnzl,
1169
        gen_op_loopzl,
1170
        gen_op_jnz_ecxl,
1171
    },
1172
#ifdef TARGET_X86_64
1173
    [2] = {
1174
        gen_op_loopnzq,
1175
        gen_op_loopzq,
1176
        gen_op_jnz_ecxq,
1177
    },
1178
#endif
1179
};
1180

    
1181
static GenOpFunc *gen_setcc_slow[8] = {
1182
    gen_op_seto_T0_cc,
1183
    gen_op_setb_T0_cc,
1184
    gen_op_setz_T0_cc,
1185
    gen_op_setbe_T0_cc,
1186
    gen_op_sets_T0_cc,
1187
    gen_op_setp_T0_cc,
1188
    gen_op_setl_T0_cc,
1189
    gen_op_setle_T0_cc,
1190
};
1191

    
1192
static GenOpFunc *gen_setcc_sub[4][8] = {
1193
    [OT_BYTE] = {
1194
        NULL,
1195
        gen_op_setb_T0_subb,
1196
        gen_op_setz_T0_subb,
1197
        gen_op_setbe_T0_subb,
1198
        gen_op_sets_T0_subb,
1199
        NULL,
1200
        gen_op_setl_T0_subb,
1201
        gen_op_setle_T0_subb,
1202
    },
1203
    [OT_WORD] = {
1204
        NULL,
1205
        gen_op_setb_T0_subw,
1206
        gen_op_setz_T0_subw,
1207
        gen_op_setbe_T0_subw,
1208
        gen_op_sets_T0_subw,
1209
        NULL,
1210
        gen_op_setl_T0_subw,
1211
        gen_op_setle_T0_subw,
1212
    },
1213
    [OT_LONG] = {
1214
        NULL,
1215
        gen_op_setb_T0_subl,
1216
        gen_op_setz_T0_subl,
1217
        gen_op_setbe_T0_subl,
1218
        gen_op_sets_T0_subl,
1219
        NULL,
1220
        gen_op_setl_T0_subl,
1221
        gen_op_setle_T0_subl,
1222
    },
1223
#ifdef TARGET_X86_64
1224
    [OT_QUAD] = {
1225
        NULL,
1226
        gen_op_setb_T0_subq,
1227
        gen_op_setz_T0_subq,
1228
        gen_op_setbe_T0_subq,
1229
        gen_op_sets_T0_subq,
1230
        NULL,
1231
        gen_op_setl_T0_subq,
1232
        gen_op_setle_T0_subq,
1233
    },
1234
#endif
1235
};
1236

    
1237
static void *helper_fp_arith_ST0_FT0[8] = {
1238
    helper_fadd_ST0_FT0,
1239
    helper_fmul_ST0_FT0,
1240
    helper_fcom_ST0_FT0,
1241
    helper_fcom_ST0_FT0,
1242
    helper_fsub_ST0_FT0,
1243
    helper_fsubr_ST0_FT0,
1244
    helper_fdiv_ST0_FT0,
1245
    helper_fdivr_ST0_FT0,
1246
};
1247

    
1248
/* NOTE the exception in "r" op ordering */
1249
static void *helper_fp_arith_STN_ST0[8] = {
1250
    helper_fadd_STN_ST0,
1251
    helper_fmul_STN_ST0,
1252
    NULL,
1253
    NULL,
1254
    helper_fsubr_STN_ST0,
1255
    helper_fsub_STN_ST0,
1256
    helper_fdivr_STN_ST0,
1257
    helper_fdiv_STN_ST0,
1258
};
1259

    
1260
/* if d == OR_TMP0, it means memory operand (address in A0) */
1261
static void gen_op(DisasContext *s1, int op, int ot, int d)
1262
{
1263
    GenOpFunc *gen_update_cc;
1264

    
1265
    if (d != OR_TMP0) {
1266
        gen_op_mov_TN_reg(ot, 0, d);
1267
    } else {
1268
        gen_op_ld_T0_A0(ot + s1->mem_index);
1269
    }
1270
    switch(op) {
1271
    case OP_ADCL:
1272
    case OP_SBBL:
1273
        if (s1->cc_op != CC_OP_DYNAMIC)
1274
            gen_op_set_cc_op(s1->cc_op);
1275
        if (d != OR_TMP0) {
1276
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
1277
            gen_op_mov_reg_T0(ot, d);
1278
        } else {
1279
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
1280
        }
1281
        s1->cc_op = CC_OP_DYNAMIC;
1282
        goto the_end;
1283
    case OP_ADDL:
1284
        gen_op_addl_T0_T1();
1285
        s1->cc_op = CC_OP_ADDB + ot;
1286
        gen_update_cc = gen_op_update2_cc;
1287
        break;
1288
    case OP_SUBL:
1289
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1290
        s1->cc_op = CC_OP_SUBB + ot;
1291
        gen_update_cc = gen_op_update2_cc;
1292
        break;
1293
    default:
1294
    case OP_ANDL:
1295
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1296
        s1->cc_op = CC_OP_LOGICB + ot;
1297
        gen_update_cc = gen_op_update1_cc;
1298
        break;
1299
    case OP_ORL:
1300
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1301
        s1->cc_op = CC_OP_LOGICB + ot;
1302
        gen_update_cc = gen_op_update1_cc;
1303
        break;
1304
    case OP_XORL:
1305
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1306
        s1->cc_op = CC_OP_LOGICB + ot;
1307
        gen_update_cc = gen_op_update1_cc;
1308
        break;
1309
    case OP_CMPL:
1310
        gen_op_cmpl_T0_T1_cc();
1311
        s1->cc_op = CC_OP_SUBB + ot;
1312
        gen_update_cc = NULL;
1313
        break;
1314
    }
1315
    if (op != OP_CMPL) {
1316
        if (d != OR_TMP0)
1317
            gen_op_mov_reg_T0(ot, d);
1318
        else
1319
            gen_op_st_T0_A0(ot + s1->mem_index);
1320
    }
1321
    /* the flags update must happen after the memory write (precise
1322
       exception support) */
1323
    if (gen_update_cc)
1324
        gen_update_cc();
1325
 the_end: ;
1326
}
1327

    
1328
/* compute eflags.C to reg */
1329
static void gen_compute_eflags_c(TCGv reg)
1330
{
1331
#if TCG_TARGET_REG_BITS == 32
1332
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
1333
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 
1334
                     (long)cc_table + offsetof(CCTable, compute_c));
1335
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
1336
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE, 
1337
                 1, &reg, 0, NULL);
1338
#else
1339
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
1340
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
1341
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64, 
1342
                     (long)cc_table + offsetof(CCTable, compute_c));
1343
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
1344
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE, 
1345
                 1, &cpu_tmp2_i32, 0, NULL);
1346
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
1347
#endif
1348
}
1349

    
1350
/* compute all eflags to cc_src */
1351
static void gen_compute_eflags(TCGv reg)
1352
{
1353
#if TCG_TARGET_REG_BITS == 32
1354
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
1355
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 
1356
                     (long)cc_table + offsetof(CCTable, compute_all));
1357
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
1358
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE, 
1359
                 1, &reg, 0, NULL);
1360
#else
1361
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
1362
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
1363
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64, 
1364
                     (long)cc_table + offsetof(CCTable, compute_all));
1365
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
1366
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE, 
1367
                 1, &cpu_tmp2_i32, 0, NULL);
1368
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
1369
#endif
1370
}
1371

    
1372
/* if d == OR_TMP0, it means memory operand (address in A0) */
1373
static void gen_inc(DisasContext *s1, int ot, int d, int c)
1374
{
1375
    if (d != OR_TMP0)
1376
        gen_op_mov_TN_reg(ot, 0, d);
1377
    else
1378
        gen_op_ld_T0_A0(ot + s1->mem_index);
1379
    if (s1->cc_op != CC_OP_DYNAMIC)
1380
        gen_op_set_cc_op(s1->cc_op);
1381
    if (c > 0) {
1382
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
1383
        s1->cc_op = CC_OP_INCB + ot;
1384
    } else {
1385
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
1386
        s1->cc_op = CC_OP_DECB + ot;
1387
    }
1388
    if (d != OR_TMP0)
1389
        gen_op_mov_reg_T0(ot, d);
1390
    else
1391
        gen_op_st_T0_A0(ot + s1->mem_index);
1392
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1393
    gen_compute_eflags_c(cpu_cc_src);
1394
}
1395

    
1396
/* XXX: add faster immediate case */
1397
static void gen_shift_rm_T1(DisasContext *s, int ot, int op1, 
1398
                            int is_right, int is_arith)
1399
{
1400
    target_ulong mask;
1401
    int shift_label;
1402
    
1403
    if (ot == OT_QUAD)
1404
        mask = 0x3f;
1405
    else
1406
        mask = 0x1f;
1407

    
1408
    /* load */
1409
    if (op1 == OR_TMP0)
1410
        gen_op_ld_T0_A0(ot + s->mem_index);
1411
    else
1412
        gen_op_mov_TN_reg(ot, 0, op1);
1413

    
1414
    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
1415

    
1416
    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);
1417

    
1418
    if (is_right) {
1419
        if (is_arith) {
1420
            switch(ot) {
1421
            case OT_BYTE:
1422
                tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
1423
                break;
1424
            case OT_WORD:
1425
                tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
1426
                break;
1427
            case OT_LONG:
1428
                tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
1429
                break;
1430
            default:
1431
                break;
1432
            }
1433
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1434
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1435
        } else {
1436
            switch(ot) {
1437
            case OT_BYTE:
1438
                tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
1439
                break;
1440
            case OT_WORD:
1441
                tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
1442
                break;
1443
            case OT_LONG:
1444
                tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
1445
                break;
1446
            default:
1447
                break;
1448
            }
1449
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1450
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1451
        }
1452
    } else {
1453
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1454
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1455
    }
1456

    
1457
    /* store */
1458
    if (op1 == OR_TMP0)
1459
        gen_op_st_T0_A0(ot + s->mem_index);
1460
    else
1461
        gen_op_mov_reg_T0(ot, op1);
1462
        
1463
    /* update eflags if non zero shift */
1464
    if (s->cc_op != CC_OP_DYNAMIC)
1465
        gen_op_set_cc_op(s->cc_op);
1466

    
1467
    shift_label = gen_new_label();
1468
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), shift_label);
1469

    
1470
    tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
1471
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1472
    if (is_right)
1473
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1474
    else
1475
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1476
        
1477
    gen_set_label(shift_label);
1478
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1479
}
1480

    
1481
/* Signed-direction immediate shift: left for arg2 >= 0, right by
   -arg2 otherwise.  Used by the rotate flag computation where the
   required direction depends on the operand width. */
static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
{
    if (arg2 >= 0)
        tcg_gen_shli_tl(ret, arg1, arg2);
    else
        tcg_gen_shri_tl(ret, arg1, -arg2);
}
/* XXX: add faster immediate case */
1490
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1, 
1491
                          int is_right)
1492
{
1493
    target_ulong mask;
1494
    int label1, label2, data_bits;
1495
    
1496
    if (ot == OT_QUAD)
1497
        mask = 0x3f;
1498
    else
1499
        mask = 0x1f;
1500

    
1501
    /* load */
1502
    if (op1 == OR_TMP0)
1503
        gen_op_ld_T0_A0(ot + s->mem_index);
1504
    else
1505
        gen_op_mov_TN_reg(ot, 0, op1);
1506

    
1507
    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
1508

    
1509
    /* Must test zero case to avoid using undefined behaviour in TCG
1510
       shifts. */
1511
    label1 = gen_new_label();
1512
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label1);
1513
    
1514
    if (ot <= OT_WORD)
1515
        tcg_gen_andi_tl(cpu_tmp0, cpu_T[1], (1 << (3 + ot)) - 1);
1516
    else
1517
        tcg_gen_mov_tl(cpu_tmp0, cpu_T[1]);
1518
    
1519
    switch(ot) {
1520
    case OT_BYTE:
1521
        tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
1522
        break;
1523
    case OT_WORD:
1524
        tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
1525
        break;
1526
    case OT_LONG:
1527
        tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
1528
        break;
1529
    default:
1530
        break;
1531
    }
1532
    tcg_gen_mov_tl(cpu_T3, cpu_T[0]);
1533

    
1534
    data_bits = 8 << ot;
1535
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
1536
       fix TCG definition) */
1537
    if (is_right) {
1538
        tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
1539
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
1540
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1541
    } else {
1542
        tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp0);
1543
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
1544
        tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1545
    }
1546
    tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1547

    
1548
    gen_set_label(label1);
1549
    /* store */
1550
    if (op1 == OR_TMP0)
1551
        gen_op_st_T0_A0(ot + s->mem_index);
1552
    else
1553
        gen_op_mov_reg_T0(ot, op1);
1554
    
1555
    /* update eflags */
1556
    if (s->cc_op != CC_OP_DYNAMIC)
1557
        gen_op_set_cc_op(s->cc_op);
1558

    
1559
    label2 = gen_new_label();
1560
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[1], tcg_const_tl(0), label2);
1561

    
1562
    gen_compute_eflags(cpu_cc_src);
1563
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
1564
    tcg_gen_xor_tl(cpu_tmp0, cpu_T3, cpu_T[0]);
1565
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
1566
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
1567
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
1568
    if (is_right) {
1569
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], data_bits - 1);
1570
    }
1571
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_C);
1572
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
1573
    
1574
    tcg_gen_discard_tl(cpu_cc_dst);
1575
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
1576
        
1577
    gen_set_label(label2);
1578
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1579
}
1580

    
1581
static void *helper_rotc[8] = {
1582
    helper_rclb,
1583
    helper_rclw,
1584
    helper_rcll,
1585
    X86_64_ONLY(helper_rclq),
1586
    helper_rcrb,
1587
    helper_rcrw,
1588
    helper_rcrl,
1589
    X86_64_ONLY(helper_rcrq),
1590
};
1591

    
1592
/* XXX: add faster immediate = 1 case */
1593
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1, 
1594
                           int is_right)
1595
{
1596
    int label1;
1597

    
1598
    if (s->cc_op != CC_OP_DYNAMIC)
1599
        gen_op_set_cc_op(s->cc_op);
1600

    
1601
    /* load */
1602
    if (op1 == OR_TMP0)
1603
        gen_op_ld_T0_A0(ot + s->mem_index);
1604
    else
1605
        gen_op_mov_TN_reg(ot, 0, op1);
1606
    
1607
    tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
1608
                       cpu_T[0], cpu_T[0], cpu_T[1]);
1609
    /* store */
1610
    if (op1 == OR_TMP0)
1611
        gen_op_st_T0_A0(ot + s->mem_index);
1612
    else
1613
        gen_op_mov_reg_T0(ot, op1);
1614

    
1615
    /* update eflags */
1616
    label1 = gen_new_label();
1617
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(-1), label1);
1618

    
1619
    tcg_gen_mov_tl(cpu_cc_src, cpu_T3);
1620
    tcg_gen_discard_tl(cpu_cc_dst);
1621
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
1622
        
1623
    gen_set_label(label1);
1624
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1625
}
1626

    
1627
/* XXX: add faster immediate case */
1628
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1, 
1629
                                int is_right)
1630
{
1631
    int label1, label2, data_bits;
1632
    target_ulong mask;
1633

    
1634
    if (ot == OT_QUAD)
1635
        mask = 0x3f;
1636
    else
1637
        mask = 0x1f;
1638

    
1639
    /* load */
1640
    if (op1 == OR_TMP0)
1641
        gen_op_ld_T0_A0(ot + s->mem_index);
1642
    else
1643
        gen_op_mov_TN_reg(ot, 0, op1);
1644

    
1645
    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
1646
    /* Must test zero case to avoid using undefined behaviour in TCG
1647
       shifts. */
1648
    label1 = gen_new_label();
1649
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label1);
1650
    
1651
    tcg_gen_addi_tl(cpu_tmp5, cpu_T3, -1);
1652
    if (ot == OT_WORD) {
1653
        /* Note: we implement the Intel behaviour for shift count > 16 */
1654
        if (is_right) {
1655
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
1656
            tcg_gen_shli_tl(cpu_tmp0, cpu_T[1], 16);
1657
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1658
            tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
1659

    
1660
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
1661
            
1662
            /* only needed if count > 16, but a test would complicate */
1663
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
1664
            tcg_gen_shl_tl(cpu_tmp0, cpu_T[0], cpu_tmp5);
1665

    
1666
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);
1667

    
1668
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1669
        } else {
1670
            /* XXX: not optimal */
1671
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
1672
            tcg_gen_shli_tl(cpu_T[1], cpu_T[1], 16);
1673
            tcg_gen_or_tl(cpu_T[1], cpu_T[1], cpu_T[0]);
1674
            tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
1675
            
1676
            tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
1677
            tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
1678
            tcg_gen_shr_tl(cpu_tmp6, cpu_T[1], cpu_tmp0);
1679
            tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);
1680

    
1681
            tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
1682
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), cpu_T3);
1683
            tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
1684
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1685
        }
1686
    } else {
1687
        data_bits = 8 << ot;
1688
        if (is_right) {
1689
            if (ot == OT_LONG)
1690
                tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
1691

    
1692
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
1693

    
1694
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T3);
1695
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
1696
            tcg_gen_shl_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
1697
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1698
            
1699
        } else {
1700
            if (ot == OT_LONG)
1701
                tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
1702

    
1703
            tcg_gen_shl_tl(cpu_tmp4, cpu_T[0], cpu_tmp5);
1704
            
1705
            tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T3);
1706
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), cpu_T3);
1707
            tcg_gen_shr_tl(cpu_T[1], cpu_T[1], cpu_tmp5);
1708
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1709
        }
1710
    }
1711
    tcg_gen_mov_tl(cpu_T[1], cpu_tmp4);
1712

    
1713
    gen_set_label(label1);
1714
    /* store */
1715
    if (op1 == OR_TMP0)
1716
        gen_op_st_T0_A0(ot + s->mem_index);
1717
    else
1718
        gen_op_mov_reg_T0(ot, op1);
1719
    
1720
    /* update eflags */
1721
    if (s->cc_op != CC_OP_DYNAMIC)
1722
        gen_op_set_cc_op(s->cc_op);
1723

    
1724
    label2 = gen_new_label();
1725
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T3, tcg_const_tl(0), label2);
1726

    
1727
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1728
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1729
    if (is_right) {
1730
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1731
    } else {
1732
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1733
    }
1734
    gen_set_label(label2);
1735
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1736
}
1737

    
1738
/* Dispatch a shift/rotate group op (OP_ROL..OP_SAR) on operand 'd'
   with the count taken from register 's' (or already in T1 when
   s == OR_TMP1). */
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (s != OR_TMP1)
        gen_op_mov_TN_reg(ot, 1, s);
    switch(op) {
    case OP_ROL:
        gen_rot_rm_T1(s1, ot, d, 0);
        break;
    case OP_ROR:
        gen_rot_rm_T1(s1, ot, d, 1);
        break;
    case OP_SHL:
    case OP_SHL1:
        gen_shift_rm_T1(s1, ot, d, 0, 0);
        break;
    case OP_SHR:
        gen_shift_rm_T1(s1, ot, d, 1, 0);
        break;
    case OP_SAR:
        gen_shift_rm_T1(s1, ot, d, 1, 1);
        break;
    case OP_RCL:
        gen_rotc_rm_T1(s1, ot, d, 0);
        break;
    case OP_RCR:
        gen_rotc_rm_T1(s1, ot, d, 1);
        break;
    }
}
/* Shift/rotate by an immediate count 'c': loads the constant into T1
   and reuses the variable-count path. */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
/* Decode the ModRM/SIB memory operand at s->pc and emit code that
   leaves the effective address (including any segment base) in A0.
   Handles 16-bit, 32-bit and 64-bit addressing, RIP-relative
   displacements, and default SS override for EBP/ESP-based forms.
   On return *reg_ptr is OR_A0 and *offset_ptr is 0 (the displacement
   is already folded into A0). */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            /* SIB byte follows */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* disp32 only (or RIP-relative in 64-bit mode) */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0) {
                    gen_op_addq_A0_im(disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* default segment: SS for EBP/ESP bases, DS otherwise */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        /* 16-bit addressing */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* disp16 only */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        /* base(+index) combinations fixed by the 16-bit ModRM table */
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* BP-based forms default to SS */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
/* Skip over a ModRM memory operand without generating any code:
   advances s->pc past the SIB byte and displacement, mirroring the
   decode logic of gen_lea_modrm.  Used for multi-byte NOP forms. */
static void gen_nop_modrm(DisasContext *s, int modrm)
{
    int mod, rm, base, code;

    mod = (modrm >> 6) & 3;
    if (mod == 3)
        return; /* register operand: nothing follows */
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit form */
        base = rm;

        if (base == 4) {
            code = ldub_code(s->pc++); /* SIB byte */
            base = (code & 7);
        }

        switch (mod) {
        case 0:
            if (base == 5) {
                s->pc += 4; /* disp32 only */
            }
            break;
        case 1:
            s->pc++; /* disp8 */
            break;
        default:
        case 2:
            s->pc += 4; /* disp32 */
            break;
        }
    } else {
        /* 16-bit form */
        switch (mod) {
        case 0:
            if (rm == 6) {
                s->pc += 2; /* disp16 only */
            }
            break;
        case 1:
            s->pc++; /* disp8 */
            break;
        default:
        case 2:
            s->pc += 2; /* disp16 */
            break;
        }
    }
}
/* used for LEA and MOV AX, mem */
2010
static void gen_add_A0_ds_seg(DisasContext *s)
2011
{
2012
    int override, must_add_seg;
2013
    must_add_seg = s->addseg;
2014
    override = R_DS;
2015
    if (s->override >= 0) {
2016
        override = s->override;
2017
        must_add_seg = 1;
2018
    } else {
2019
        override = R_DS;
2020
    }
2021
    if (must_add_seg) {
2022
#ifdef TARGET_X86_64
2023
        if (CODE64(s)) {
2024
            gen_op_addq_A0_seg(override);
2025
        } else
2026
#endif
2027
        {
2028
            gen_op_addl_A0_seg(override);
2029
        }
2030
    }
2031
}
2032

    
2033
/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
2034
   OR_TMP0 */
2035
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
2036
{
2037
    int mod, rm, opreg, disp;
2038

    
2039
    mod = (modrm >> 6) & 3;
2040
    rm = (modrm & 7) | REX_B(s);
2041
    if (mod == 3) {
2042
        if (is_store) {
2043
            if (reg != OR_TMP0)
2044
                gen_op_mov_TN_reg(ot, 0, reg);
2045
            gen_op_mov_reg_T0(ot, rm);
2046
        } else {
2047
            gen_op_mov_TN_reg(ot, 0, rm);
2048
            if (reg != OR_TMP0)
2049
                gen_op_mov_reg_T0(ot, reg);
2050
        }
2051
    } else {
2052
        gen_lea_modrm(s, modrm, &opreg, &disp);
2053
        if (is_store) {
2054
            if (reg != OR_TMP0)
2055
                gen_op_mov_TN_reg(ot, 0, reg);
2056
            gen_op_st_T0_A0(ot + s->mem_index);
2057
        } else {
2058
            gen_op_ld_T0_A0(ot + s->mem_index);
2059
            if (reg != OR_TMP0)
2060
                gen_op_mov_reg_T0(ot, reg);
2061
        }
2062
    }
2063
}
2064

    
2065
/* Fetch an immediate operand of size 'ot' from the instruction stream
   at s->pc and advance s->pc past it.  Sizes above OT_WORD are read as
   32-bit values (the default case). */
static inline uint32_t insn_get(DisasContext *s, int ot)
{
    uint32_t ret;

    switch(ot) {
    case OT_BYTE:
        ret = ldub_code(s->pc);
        s->pc++;
        break;
    case OT_WORD:
        ret = lduw_code(s->pc);
        s->pc += 2;
        break;
    default:
    case OT_LONG:
        ret = ldl_code(s->pc);
        s->pc += 4;
        break;
    }
    return ret;
}
2086

    
2087
static inline int insn_const_size(unsigned int ot)
2088
{
2089
    if (ot <= OT_LONG)
2090
        return 1 << ot;
2091
    else
2092
        return 4;
2093
}
2094

    
2095
/* Emit a jump to 'eip'.  If the destination lies on one of the guest
   pages the current TB already touches, emit a chainable direct jump
   (tcg_gen_goto_tb/exit_tb with the slot index encoded in the return
   value); otherwise just store EIP and end the block. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2115

    
2116
/* Emit a conditional jump: to 'val' when the condition holds, to
   'next_eip' otherwise.  'b' encodes the x86 condition (bit 0 inverts
   it, bits 1..3 select the jcc operation).  With s->jmp_opt the test
   uses a fast cc_op-specific branch helper and TB chaining; otherwise
   EIP is stored explicitly on both paths and the block ends.
   Changes vs. original: the two identical JCC_Z / JCC_S case bodies
   are merged, and the dead local 'tb' (assigned, never read) is
   removed. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            switch(jcc_op) {
            case JCC_Z:
            case JCC_S:
                /* same size-indexed lookup serves both conditions;
                   the original duplicated this line in two identical
                   cases */
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            /* slow path: materialize the condition into T0 and branch
               on it */
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        if (inv) {
            /* inverted condition: swap taken / not-taken targets */
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
2244

    
2245
/* Compute into T0 the x86 condition encoded by 'b' (bit 0 inverts,
   bits 1..3 select the jcc operation), using a fast cc_op-specific
   lookup when the live flags state allows it and the slow generic
   helper otherwise.  The two identical JCC_Z / JCC_S case bodies of
   the original are merged. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        switch(jcc_op) {
        case JCC_Z:
        case JCC_S:
            /* identical lookup for both conditions; merged from two
               duplicate cases */
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}
2311

    
2312
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
/* In protected mode (and not vm86) the load goes through the
   helper_load_seg helper so descriptor checks apply; in real/vm86
   mode the selector is written directly.  'cur_eip' is stored first
   so a fault in the helper reports the right EIP. */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
2335

    
2336
static inline int svm_is_rep(int prefixes)
2337
{
2338
    return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2339
}
2340

    
2341
/* Emit SVM intercept checks for instruction 'type' with exit-info
   'param'.  Returns 1 when the generated code unconditionally exits
   the guest here (TB ended), 0 when translation should continue
   (either no SVM, or the intercept is decided at run time by a
   helper). */
static inline int
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint32_t type, uint64_t param)
{
    if(!(s->flags & (INTERCEPT_SVM_MASK)))
        /* no SVM activated */
        return 0;
    switch(type) {
        /* CRx and DRx reads/writes */
        case SVM_EXIT_READ_CR0 ... SVM_EXIT_EXCP_BASE - 1:
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
            }
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_svm_check_intercept_param, 
                               tcg_const_i32(type), tcg_const_i64(param));
            /* this is a special case as we do not know if the interception occurs
               so we assume there was none */
            return 0;
        case SVM_EXIT_MSR:
            if(s->flags & (1ULL << INTERCEPT_MSR_PROT)) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                }
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_svm_check_intercept_param,
                                   tcg_const_i32(type), tcg_const_i64(param));
                /* this is a special case as we do not know if the interception occurs
                   so we assume there was none */
                return 0;
            }
            break;
        default:
            if(s->flags & (1ULL << ((type - SVM_EXIT_INTR) + INTERCEPT_INTR))) {
                if (s->cc_op != CC_OP_DYNAMIC) {
                    gen_op_set_cc_op(s->cc_op);
                }
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_vmexit,
                                   tcg_const_i32(type), tcg_const_i64(param));
                /* we can optimize this one so TBs don't get longer
                   than up to vmexit */
                gen_eob(s);
                return 1;
            }
    }
    return 0;
}
2389

    
2390
/* Convenience wrapper for intercepts that carry no exit-info
   parameter. */
static inline int
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    return gen_svm_check_intercept_param(s, pc_start, type, 0);
}
2395

    
2396
/* Add 'addend' to ESP using the stack width implied by the current
   mode: 64-bit in long mode, else 32- or 16-bit depending on ss32. */
static inline void gen_stack_update(DisasContext *s, int addend)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_addq_ESP_im(addend);
    } else
#endif
    if (s->ss32) {
        gen_op_addl_ESP_im(addend);
    } else {
        gen_op_addw_ESP_im(addend);
    }
}
2409

    
2410
/* generate a push. It depends on ss32, addseg and dflag */
/* Pushes T0: decrement a copy of ESP in A0, store T0 at the new top
   of stack (adding the SS base where needed), then write the updated
   stack pointer back. */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            /* 16-bit operand size in long mode */
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                /* keep the unsegmented ESP value in T1 for the final
                   stack-pointer update */
                gen_op_movl_T1_A0();
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
2449

    
2450
/* generate a push. It depends on ss32, addseg and dflag */
2451
/* slower version for T1, only used for call Ev */
2452
static void gen_push_T1(DisasContext *s)
2453
{
2454
#ifdef TARGET_X86_64
2455
    if (CODE64(s)) {
2456
        gen_op_movq_A0_reg(R_ESP);
2457
        if (s->dflag) {
2458
            gen_op_addq_A0_im(-8);
2459
            gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2460
        } else {
2461
            gen_op_addq_A0_im(-2);
2462
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
2463
        }
2464
        gen_op_mov_reg_A0(2, R_ESP);
2465
    } else
2466
#endif
2467
    {
2468
        gen_op_movl_A0_reg(R_ESP);
2469
        if (!s->dflag)
2470
            gen_op_addl_A0_im(-2);
2471
        else
2472
            gen_op_addl_A0_im(-4);
2473
        if (s->ss32) {
2474
            if (s->addseg) {
2475
                gen_op_addl_A0_seg(R_SS);
2476
            }
2477
        } else {
2478
            gen_op_andl_A0_ffff();
2479
            gen_op_addl_A0_seg(R_SS);
2480
        }
2481
        gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2482

    
2483
        if (s->ss32 && !s->addseg)
2484
            gen_op_mov_reg_A0(1, R_ESP);
2485
        else
2486
            gen_stack_update(s, (-2) << s->dflag);
2487
    }
2488
}
2489

    
2490
/* two step pop is necessary for precise exceptions */
/* Loads the value at the top of stack into T0 without adjusting ESP;
   the caller performs the ESP update (gen_pop_update) only after the
   load cannot fault any more. */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
    }
}
2511

    
2512
/* Second half of a pop: bump ESP by the popped operand's size (8, 4
   or 2 bytes). */
static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}
2523

    
2524
/* A0 := current stack pointer (segmented when addseg), with the raw
   offset preserved in T1. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
2533

    
2534
/* NOTE: wrap around in 16 bit not fully handled */
/* PUSHA/PUSHAD: store EAX..EDI (in reverse register order) at
   ESP - 8*size, then set ESP to the new bottom kept in T1. */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-16 <<  s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
2552

    
2553
/* NOTE: wrap around in 16 bit not fully handled */
/* POPA/POPAD: reload EDI..EAX from the stack (skipping the saved ESP
   slot), then set ESP from T1 which holds old ESP + 8*size. */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    gen_op_addl_T1_im(16 <<  s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
2574

    
2575
/* ENTER: push EBP, optionally copy 'level' previous frame pointers
   via a helper, set EBP to the new frame base and drop ESP by
   esp_addend plus the copied frame-pointer area. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    /* level is architecturally taken modulo 32 */
    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter64_level,
                               tcg_const_i32(level),
                               tcg_const_i32((ot == OT_QUAD)),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        gen_op_movl_T1_A0();
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter_level,
                               tcg_const_i32(level),
                               tcg_const_i32(s->dflag),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
2630

    
2631
/* Raise exception 'trapno' at guest address 'cur_eip': flush the lazy
   flags state and EIP first so the exception sees a consistent CPU
   state, then end the block. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
    s->is_jmp = 3;
}
2639

    
2640
/* an interrupt is different from an exception because of the
   privilege checks */
/* 'next_eip - cur_eip' (the instruction length) is passed so the
   helper can compute the return address pushed on the stack. */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_2(helper_raise_interrupt, 
                       tcg_const_i32(intno), 
                       tcg_const_i32(next_eip - cur_eip));
    s->is_jmp = 3;
}
2653

    
2654
/* Enter the debug handler at 'cur_eip' (used for breakpoints): sync
   flags and EIP, call helper_debug, end the block. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_0(helper_debug);
    s->is_jmp = 3;
}
2662

    
2663
/* generate a generic end of block. Trace exception is also generated
   if needed */
/* Flushes lazy flags, clears the IRQ-inhibit flag if it was set by
   the previous instruction, and exits via the debug/single-step
   helper or a plain (non-chained) TB exit. */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        tcg_gen_helper_0_0(helper_reset_inhibit_irq);
    }
    if (s->singlestep_enabled) {
        tcg_gen_helper_0_0(helper_debug);
    } else if (s->tf) {
        tcg_gen_helper_0_0(helper_single_step);
    } else {
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = 3;
}
2681

    
2682
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
/* With jump optimization the destination may be chained via
   gen_goto_tb; otherwise EIP is stored and the block ends. */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2698

    
2699
/* Unconditional jump to 'eip' using TB chaining slot 0. */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2703

    
2704
/* Load a 64-bit value from guest memory at A0 into the CPU state at
   'offset'.  'idx' carries the memory index encoded the same way as
   s->mem_index. */
static inline void gen_ldq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
}
2710

    
2711
/* Store the 64-bit value at CPU-state 'offset' to guest memory at
   A0. */
static inline void gen_stq_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
}
2717

    
2718
/* Load a 128-bit XMM value from guest memory at A0 into the register
   at CPU-state 'offset', as two 64-bit halves. */
static inline void gen_ldo_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
}
2727

    
2728
/* Store the 128-bit XMM register at CPU-state 'offset' to guest
   memory at A0, as two 64-bit halves. */
static inline void gen_sto_env_A0(int idx, int offset)
{
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
}
2737

    
2738
/* Copy a 128-bit (octaword) value between two CPU-state offsets. */
static inline void gen_op_movo(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
}
2745

    
2746
/* Copy a 64-bit value between two CPU-state offsets. */
static inline void gen_op_movq(int d_offset, int s_offset)
{
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
2751

    
2752
/* Copy a 32-bit value between two CPU-state offsets. */
static inline void gen_op_movl(int d_offset, int s_offset)
{
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
}
2757

    
2758
/* Zero the 64-bit value at CPU-state offset 'd_offset'. */
static inline void gen_op_movq_env_0(int d_offset)
{
    tcg_gen_movi_i64(cpu_tmp1_i64, 0);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
2763

    
2764
#define SSE_SPECIAL ((void *)1)
2765
#define SSE_DUMMY ((void *)2)
2766

    
2767
#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
2768
#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
2769
                     helper_ ## x ## ss, helper_ ## x ## sd, }
2770

    
2771
static void *sse_op_table1[256][4] = {
2772
    /* 3DNow! extensions */
2773
    [0x0e] = { SSE_DUMMY }, /* femms */
2774
    [0x0f] = { SSE_DUMMY }, /* pf... */
2775
    /* pure SSE operations */
2776
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2777
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
2778
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
2779
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
2780
    [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
2781
    [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
2782
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
2783
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */
2784

    
2785
    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
2786
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
2787
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
2788
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
2789
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
2790
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
2791
    [0x2e] = { helper_ucomiss, helper_ucomisd },
2792
    [0x2f] = { helper_comiss, helper_comisd },
2793
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
2794
    [0x51] = SSE_FOP(sqrt),
2795
    [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
2796
    [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
2797
    [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
2798
    [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
2799
    [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
2800
    [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
2801
    [0x58] = SSE_FOP(add),
2802
    [0x59] = SSE_FOP(mul),
2803
    [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
2804
               helper_cvtss2sd, helper_cvtsd2ss },
2805
    [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
2806
    [0x5c] = SSE_FOP(sub),
2807
    [0x5d] = SSE_FOP(min),
2808
    [0x5e] = SSE_FOP(div),
2809
    [0x5f] = SSE_FOP(max),
2810

    
2811
    [0xc2] = SSE_FOP(cmpeq),
2812
    [0xc6] = { helper_shufps, helper_shufpd },
2813

    
2814
    /* MMX ops and their SSE extensions */
2815
    [0x60] = MMX_OP2(punpcklbw),
2816
    [0x61] = MMX_OP2(punpcklwd),
2817
    [0x62] = MMX_OP2(punpckldq),
2818
    [0x63] = MMX_OP2(packsswb),
2819
    [0x64] = MMX_OP2(pcmpgtb),
2820
    [0x65] = MMX_OP2(pcmpgtw),
2821
    [0x66] = MMX_OP2(pcmpgtl),
2822
    [0x67] = MMX_OP2(packuswb),
2823
    [0x68] = MMX_OP2(punpckhbw),
2824
    [0x69] = MMX_OP2(punpckhwd),
2825
    [0x6a] = MMX_OP2(punpckhdq),
2826
    [0x6b] = MMX_OP2(packssdw),
2827
    [0x6c] = { NULL, helper_punpcklqdq_xmm },
2828
    [0x6d] = { NULL, helper_punpckhqdq_xmm },
2829
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
2830
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movqdu */
2831
    [0x70] = { helper_pshufw_mmx,
2832
               helper_pshufd_xmm,
2833
               helper_pshufhw_xmm,
2834
               helper_pshuflw_xmm },
2835
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
2836
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
2837
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
2838
    [0x74] = MMX_OP2(pcmpeqb),
2839
    [0x75] = MMX_OP2(pcmpeqw),
2840
    [0x76] = MMX_OP2(pcmpeql),
2841
    [0x77] = { SSE_DUMMY }, /* emms */
2842
    [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
2843
    [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
2844
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
2845
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
2846
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
2847
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
2848
    [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
2849
    [0xd1] = MMX_OP2(psrlw),
2850
    [0xd2] = MMX_OP2(psrld),
2851
    [0xd3] = MMX_OP2(psrlq),
2852
    [0xd4] = MMX_OP2(paddq),
2853
    [0xd5] = MMX_OP2(pmullw),
2854
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
2855
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
2856
    [0xd8] = MMX_OP2(psubusb),
2857
    [0xd9] = MMX_OP2(psubusw),
2858
    [0xda] = MMX_OP2(pminub),
2859
    [0xdb] = MMX_OP2(pand),
2860
    [0xdc] = MMX_OP2(paddusb),
2861
    [0xdd] = MMX_OP2(paddusw),
2862
    [0xde] = MMX_OP2(pmaxub),
2863
    [0xdf] = MMX_OP2(pandn),
2864
    [0xe0] = MMX_OP2(pavgb),
2865
    [0xe1] = MMX_OP2(psraw),
2866
    [0xe2] = MMX_OP2(psrad),
2867
    [0xe3] = MMX_OP2(pavgw),
2868
    [0xe4] = MMX_OP2(pmulhuw),
2869
    [0xe5] = MMX_OP2(pmulhw),
2870
    [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
2871
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntq */
2872
    [0xe8] = MMX_OP2(psubsb),
2873
    [0xe9] = MMX_OP2(psubsw),
2874
    [0xea] = MMX_OP2(pminsw),
2875
    [0xeb] = MMX_OP2(por),
2876
    [0xec] = MMX_OP2(paddsb),
2877
    [0xed] = MMX_OP2(paddsw),
2878
    [0xee] = MMX_OP2(pmaxsw),
2879
    [0xef] = MMX_OP2(pxor),
2880
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
2881
    [0xf1] = MMX_OP2(psllw),
2882
    [0xf2] = MMX_OP2(pslld),
2883
    [0xf3] = MMX_OP2(psllq),
2884
    [0xf4] = MMX_OP2(pmuludq),
2885
    [0xf5] = MMX_OP2(pmaddwd),
2886
    [0xf6] = MMX_OP2(psadbw),
2887
    [0xf7] = MMX_OP2(maskmov),
2888
    [0xf8] = MMX_OP2(psubb),
2889
    [0xf9] = MMX_OP2(psubw),
2890
    [0xfa] = MMX_OP2(psubl),
2891
    [0xfb] = MMX_OP2(psubq),
2892
    [0xfc] = MMX_OP2(paddb),
2893
    [0xfd] = MMX_OP2(paddw),
2894
    [0xfe] = MMX_OP2(paddl),
2895
};
2896

    
2897
/* Shift-by-immediate group dispatch (opcodes 0x71/0x72/0x73):
   indexed by 8*size-group + modrm reg field; column 0 is the MMX
   helper, column 1 the XMM helper. */
static void *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, helper_psrldq_xmm },   /* XMM only */
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, helper_pslldq_xmm },   /* XMM only */
};
2909

    
2910
/* int<->float scalar conversion helpers, in groups of four
   (ss, sd, then the 64-bit source/result variants, which exist only
   on x86-64): cvtsi2*, cvtt*2si (truncating), cvt*2si (rounding). */
static void *sse_op_table3[4 * 3] = {
    helper_cvtsi2ss,
    helper_cvtsi2sd,
    X86_64_ONLY(helper_cvtsq2ss),
    X86_64_ONLY(helper_cvtsq2sd),

    helper_cvttss2si,
    helper_cvttsd2si,
    X86_64_ONLY(helper_cvttss2sq),
    X86_64_ONLY(helper_cvttsd2sq),

    helper_cvtss2si,
    helper_cvtsd2si,
    X86_64_ONLY(helper_cvtss2sq),
    X86_64_ONLY(helper_cvtsd2sq),
};
2926

    
2927
/* CMPPS/CMPPD/CMPSS/CMPSD predicates, indexed by the imm8 comparison
   code 0..7; each row holds the ps/pd/ss/sd helper variants. */
static void *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2937

    
2938
/* 3DNow! operations, indexed by the instruction's trailing imm8
   opcode byte. */
static void *sse_op_table5[256] = {
    [0x0c] = helper_pi2fw,
    [0x0d] = helper_pi2fd,
    [0x1c] = helper_pf2iw,
    [0x1d] = helper_pf2id,
    [0x8a] = helper_pfnacc,
    [0x8e] = helper_pfpnacc,
    [0x90] = helper_pfcmpge,
    [0x94] = helper_pfmin,
    [0x96] = helper_pfrcp,
    [0x97] = helper_pfrsqrt,
    [0x9a] = helper_pfsub,
    [0x9e] = helper_pfadd,
    [0xa0] = helper_pfcmpgt,
    [0xa4] = helper_pfmax,
    [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = helper_movq, /* pfrsqit1 */
    [0xaa] = helper_pfsubr,
    [0xae] = helper_pfacc,
    [0xb0] = helper_pfcmpeq,
    [0xb4] = helper_pfmul,
    [0xb6] = helper_movq, /* pfrcpit2 */
    [0xb7] = helper_pmulhrw_mmx,
    [0xbb] = helper_pswapd,
    [0xbf] = helper_pavgb_mmx /* pavgusb */
};
2964

    
2965
/* Translate one MMX/SSE/SSE2/SSE3/3DNow! instruction.
   'b' is the secondary opcode byte (the xx of a 0f xx encoding),
   'pc_start' the guest address of the instruction, and 'rex_r' the
   REX.R extension bit (already shifted) for the modrm reg field.
   Emits TCG ops for the instruction, or for a #NM/#UD exception when
   the instruction is unavailable (CR0.TS/EM, missing OSFXSR, missing
   CPUID feature, or an undefined encoding). */
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    void *sse_op2;

    b &= 0xff;
    /* b1 selects the mandatory-prefix column of sse_op_table1:
       0 = no prefix, 1 = 0x66, 2 = 0xf3, 3 = 0xf2 */
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    /* opcodes 0x10..0x5f, 0xc2 and 0xc6 use XMM registers even in the
       no-prefix column; everything else is MMX unless a prefix is present */
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        /* CR0.TS set: raise device-not-available (#NM) */
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    illegal_op:
        /* shared #UD exit for all undefined/unavailable encodings above */
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        goto illegal_op;
    if (b == 0x0e) {
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
            goto illegal_op;
        /* femms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    if (b == 0x77) {
        /* emms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        tcg_gen_helper_0_0(helper_enter_mmx);
    }

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    if (is_xmm)
        reg |= rex_r;
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        /* hand-decoded opcodes: fold the prefix column into bits 8..9
           of b so a single switch can distinguish all variants */
        b |= (b1 << 8);
        switch(b) {
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntps */
        case 0x3f0: /* lddqu */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx));
                tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
            }
            break;
        case 0x16e: /* movd xmm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
            }
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                /* memory form zeroes the three upper dwords */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                /* memory form zeroes the upper qword */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            /* duplicate the even dwords into the odd slots */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            /* duplicate the odd dwords into the even slots */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
        case 0x7e: /* movd ea, mm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x17e: /* movd ea, xmm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x27e: /* movq xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x7f: /* movq ea, mm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                            offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x011: /* movups */
        case 0x111: /* movupd */
        case 0x029: /* movaps */
        case 0x129: /* movapd */
        case 0x17f: /* movdqa ea, xmm */
        case 0x27f: /* movdqu ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                            offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x211: /* movss ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            }
            break;
        case 0x311: /* movsd ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            }
            break;
        case 0x013: /* movlps */
        case 0x113: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x017: /* movhps */
        case 0x117: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x71: /* shift mm, im */
        case 0x72:
        case 0x73:
        case 0x171: /* shift xmm, im */
        case 0x172:
        case 0x173:
            /* the shift count imm8 is materialized in xmm_t0/mmx_t0 and
               passed as the second operand of the table2 helper */
            val = ldub_code(s->pc++);
            if (is_xmm) {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
                op1_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
                op1_offset = offsetof(CPUX86State,mmx_t0);
            }
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
            if (!sse_op2)
                goto illegal_op;
            if (is_xmm) {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0x050: /* movmskps */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x150: /* movmskpd */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x02a: /* cvtpi2ps */
        case 0x12a: /* cvtpi2pd */
            tcg_gen_helper_0_0(helper_enter_mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b >> 8) {
            case 0x0:
                tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
                break;
            default:
            case 0x1:
                tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22a: /* cvtsi2ss */
        case 0x32a: /* cvtsi2sd */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            /* see the layout comment on sse_op_table3 */
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
            tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
            break;
        case 0x02c: /* cvttps2pi */
        case 0x12c: /* cvttpd2pi */
        case 0x02d: /* cvtps2pi */
        case 0x12d: /* cvtpd2pi */
            tcg_gen_helper_0_0(helper_enter_mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                gen_ldo_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b) {
            case 0x02c:
                tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x12c:
                tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x02d:
                tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x12d:
                tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22c: /* cvttss2si */
        case 0x32c: /* cvttsd2si */
        case 0x22d: /* cvtss2si */
        case 0x32d: /* cvtsd2si */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if ((b >> 8) & 1) {
                    gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
                } else {
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                }
                op2_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            /* +4 selects the cvtt* group, +8 (b & 1) the cvt* group */
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                                    (b & 1) * 4];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            if (ot == OT_LONG) {
                tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            } else {
                tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
            }
            gen_op_mov_reg_T0(ot, reg);
            break;
        case 0xc4: /* pinsrw */
        case 0x1c4:
            s->rip_offset = 1;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
            } else {
                val &= 3;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
            }
            break;
        case 0xc5: /* pextrw */
        case 0x1c5:
            if (mod != 3)
                goto illegal_op;
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
            } else {
                val &= 3;
                rm = (modrm & 7);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x1d6: /* movq ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x2d6: /* movq2dq */
            tcg_gen_helper_0_0(helper_enter_mmx);
            rm = (modrm & 7);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,fpregs[rm].mmx));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x3d6: /* movdq2q */
            tcg_gen_helper_0_0(helper_enter_mmx);
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            break;
        case 0xd7: /* pmovmskb */
        case 0x1d7:
            if (mod != 3)
                goto illegal_op;
            if (b1) {
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
                tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
            } else {
                rm = (modrm & 7);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
            }
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        default:
            goto illegal_op;
        }
    } else {
        /* generic MMX or SSE operation */
        switch(b) {
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
        case 0xc2: /* compare insns */
            /* these carry a trailing imm8; record it so RIP-relative
               addressing in gen_lea_modrm accounts for the extra byte */
            s->rip_offset = 1;
            break;
        default:
            break;
        }
        if (is_xmm) {
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
                                b == 0xc2)) {
                    /* specific case for SSE single instructions */
                    if (b1 == 2) {
                        /* 32 bit access */
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                    } else {
                        /* 64 bit access */
                        gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
                    }
                } else {
                    gen_ldo_env_A0(s->mem_index, op2_offset);
                }
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
        } else {
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
        }
        switch(b) {
        case 0x0f: /* 3DNow! data insns */
            if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
                goto illegal_op;
            val = ldub_code(s->pc++);
            sse_op2 = sse_op_table5[val];
            if (!sse_op2)
                goto illegal_op;
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
            val = ldub_code(s->pc++);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
            break;
        case 0xc2:
            /* compare insns */
            val = ldub_code(s->pc++);
            if (val >= 8)
                goto illegal_op;
            sse_op2 = sse_op_table4[val][b1];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0xf7:
            /* maskmov : we must prepare A0 */
            if (mod != 3)
                goto illegal_op;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(R_EDI);
            } else
#endif
            {
                gen_op_movl_A0_reg(R_EDI);
                if (s->aflag == 0)
                    gen_op_andl_A0_ffff();
            }
            gen_add_A0_ds_seg(s);

            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
            break;
        default:
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        }
        if (b == 0x2e || b == 0x2f) {
            /* just to keep the EFLAGS optimization correct */
            gen_op_com_dummy();
            s->cc_op = CC_OP_EFLAGS;
        }
    }
}
3632

    
3633
/* convert one instruction. s->is_jmp is set if the translation must
3634
   be stopped. Return the next pc value */
3635
static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3636
{
3637
    int b, prefixes, aflag, dflag;
3638
    int shift, ot;
3639
    int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3640
    target_ulong next_eip, tval;
3641
    int rex_w, rex_r;
3642

    
3643
    s->pc = pc_start;
3644
    prefixes = 0;
3645
    aflag = s->code32;
3646
    dflag = s->code32;
3647
    s->override = -1;
3648
    rex_w = -1;
3649
    rex_r = 0;
3650
#ifdef TARGET_X86_64
3651
    s->rex_x = 0;
3652
    s->rex_b = 0;
3653
    x86_64_hregs = 0;
3654
#endif
3655
    s->rip_offset = 0; /* for relative ip address */
3656
 next_byte:
3657
    b = ldub_code(s->pc);
3658
    s->pc++;
3659
    /* check prefixes */
3660
#ifdef TARGET_X86_64
3661
    if (CODE64(s)) {
3662
        switch (b) {
3663
        case 0xf3:
3664
            prefixes |= PREFIX_REPZ;
3665
            goto next_byte;
3666
        case 0xf2:
3667
            prefixes |= PREFIX_REPNZ;
3668
            goto next_byte;
3669
        case 0xf0:
3670
            prefixes |= PREFIX_LOCK;
3671
            goto next_byte;
3672
        case 0x2e:
3673
            s->override = R_CS;
3674
            goto next_byte;
3675
        case 0x36:
3676
            s->override = R_SS;
3677
            goto next_byte;
3678
        case 0x3e:
3679
            s->override = R_DS;
3680
            goto next_byte;
3681
        case 0x26:
3682
            s->override = R_ES;
3683
            goto next_byte;
3684
        case 0x64:
3685
            s->override = R_FS;
3686
            goto next_byte;
3687
        case 0x65:
3688
            s->override = R_GS;
3689
            goto next_byte;
3690
        case 0x66:
3691
            prefixes |= PREFIX_DATA;
3692
            goto next_byte;
3693
        case 0x67:
3694
            prefixes |= PREFIX_ADR;
3695
            goto next_byte;
3696
        case 0x40 ... 0x4f:
3697
            /* REX prefix */
3698
            rex_w = (b >> 3) & 1;
3699
            rex_r = (b & 0x4) << 1;
3700
            s->rex_x = (b & 0x2) << 2;
3701
            REX_B(s) = (b & 0x1) << 3;
3702
            x86_64_hregs = 1; /* select uniform byte register addressing */
3703
            goto next_byte;
3704
        }
3705
        if (rex_w == 1) {
3706
            /* 0x66 is ignored if rex.w is set */
3707
            dflag = 2;
3708
        } else {
3709
            if (prefixes & PREFIX_DATA)
3710
                dflag ^= 1;
3711
        }
3712
        if (!(prefixes & PREFIX_ADR))
3713
            aflag = 2;
3714
    } else
3715
#endif
3716
    {
3717
        switch (b) {
3718
        case 0xf3:
3719
            prefixes |= PREFIX_REPZ;
3720
            goto next_byte;
3721
        case 0xf2:
3722
            prefixes |= PREFIX_REPNZ;
3723
            goto next_byte;
3724
        case 0xf0:
3725
            prefixes |= PREFIX_LOCK;
3726
            goto next_byte;
3727
        case 0x2e:
3728
            s->override = R_CS;
3729
            goto next_byte;
3730
        case 0x36:
3731
            s->override = R_SS;
3732
            goto next_byte;
3733
        case 0x3e:
3734
            s->override = R_DS;
3735
            goto next_byte;
3736
        case 0x26:
3737
            s->override = R_ES;
3738
            goto next_byte;
3739
        case 0x64:
3740
            s->override = R_FS;
3741
            goto next_byte;
3742
        case 0x65:
3743
            s->override = R_GS;
3744
            goto next_byte;
3745
        case 0x66:
3746
            prefixes |= PREFIX_DATA;
3747
            goto next_byte;
3748
        case 0x67:
3749
            prefixes |= PREFIX_ADR;
3750
            goto next_byte;
3751
        }
3752
        if (prefixes & PREFIX_DATA)
3753
            dflag ^= 1;
3754
        if (prefixes & PREFIX_ADR)
3755
            aflag ^= 1;
3756
    }
3757

    
3758
    s->prefix = prefixes;
3759
    s->aflag = aflag;
3760
    s->dflag = dflag;
3761

    
3762
    /* lock generation */
3763
    if (prefixes & PREFIX_LOCK)
3764
        tcg_gen_helper_0_0(helper_lock);
3765

    
3766
    /* now check op code */
3767
 reswitch:
3768
    switch(b) {
3769
    case 0x0f:
3770
        /**************************/
3771
        /* extended op code */
3772
        b = ldub_code(s->pc++) | 0x100;
3773
        goto reswitch;
3774

    
3775
        /**************************/
3776
        /* arith & logic */
3777
    case 0x00 ... 0x05:
3778
    case 0x08 ... 0x0d:
3779
    case 0x10 ... 0x15:
3780
    case 0x18 ... 0x1d:
3781
    case 0x20 ... 0x25:
3782
    case 0x28 ... 0x2d:
3783
    case 0x30 ... 0x35:
3784
    case 0x38 ... 0x3d:
3785
        {
3786
            int op, f, val;
3787
            op = (b >> 3) & 7;
3788
            f = (b >> 1) & 3;
3789

    
3790
            if ((b & 1) == 0)
3791
                ot = OT_BYTE;
3792
            else
3793
                ot = dflag + OT_WORD;
3794

    
3795
            switch(f) {
3796
            case 0: /* OP Ev, Gv */
3797
                modrm = ldub_code(s->pc++);
3798
                reg = ((modrm >> 3) & 7) | rex_r;
3799
                mod = (modrm >> 6) & 3;
3800
                rm = (modrm & 7) | REX_B(s);
3801
                if (mod != 3) {
3802
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3803
                    opreg = OR_TMP0;
3804
                } else if (op == OP_XORL && rm == reg) {
3805
                xor_zero:
3806
                    /* xor reg, reg optimisation */
3807
                    gen_op_movl_T0_0();
3808
                    s->cc_op = CC_OP_LOGICB + ot;
3809
                    gen_op_mov_reg_T0(ot, reg);
3810
                    gen_op_update1_cc();
3811
                    break;
3812
                } else {
3813
                    opreg = rm;
3814
                }
3815
                gen_op_mov_TN_reg(ot, 1, reg);
3816
                gen_op(s, op, ot, opreg);
3817
                break;
3818
            case 1: /* OP Gv, Ev */
3819
                modrm = ldub_code(s->pc++);
3820
                mod = (modrm >> 6) & 3;
3821
                reg = ((modrm >> 3) & 7) | rex_r;
3822
                rm = (modrm & 7) | REX_B(s);
3823
                if (mod != 3) {
3824
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3825
                    gen_op_ld_T1_A0(ot + s->mem_index);
3826
                } else if (op == OP_XORL && rm == reg) {
3827
                    goto xor_zero;
3828
                } else {
3829
                    gen_op_mov_TN_reg(ot, 1, rm);
3830
                }
3831
                gen_op(s, op, ot, reg);
3832
                break;
3833
            case 2: /* OP A, Iv */
3834
                val = insn_get(s, ot);
3835
                gen_op_movl_T1_im(val);
3836
                gen_op(s, op, ot, OR_EAX);
3837
                break;
3838
            }
3839
        }
3840
        break;
3841

    
3842
    case 0x80: /* GRP1 */
3843
    case 0x81:
3844
    case 0x82:
3845
    case 0x83:
3846
        {
3847
            int val;
3848

    
3849
            if ((b & 1) == 0)
3850
                ot = OT_BYTE;
3851
            else
3852
                ot = dflag + OT_WORD;
3853

    
3854
            modrm = ldub_code(s->pc++);
3855
            mod = (modrm >> 6) & 3;
3856
            rm = (modrm & 7) | REX_B(s);
3857
            op = (modrm >> 3) & 7;
3858

    
3859
            if (mod != 3) {
3860
                if (b == 0x83)
3861
                    s->rip_offset = 1;
3862
                else
3863
                    s->rip_offset = insn_const_size(ot);
3864
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3865
                opreg = OR_TMP0;
3866
            } else {
3867
                opreg = rm;
3868
            }
3869

    
3870
            switch(b) {
3871
            default:
3872
            case 0x80:
3873
            case 0x81:
3874
            case 0x82:
3875
                val = insn_get(s, ot);
3876
                break;
3877
            case 0x83:
3878
                val = (int8_t)insn_get(s, OT_BYTE);
3879
                break;
3880
            }
3881
            gen_op_movl_T1_im(val);
3882
            gen_op(s, op, ot, opreg);
3883
        }
3884
        break;
3885

    
3886
        /**************************/
3887
        /* inc, dec, and other misc arith */
3888
    case 0x40 ... 0x47: /* inc Gv */
3889
        ot = dflag ? OT_LONG : OT_WORD;
3890
        gen_inc(s, ot, OR_EAX + (b & 7), 1);
3891
        break;
3892
    case 0x48 ... 0x4f: /* dec Gv */
3893
        ot = dflag ? OT_LONG : OT_WORD;
3894
        gen_inc(s, ot, OR_EAX + (b & 7), -1);
3895
        break;
3896
    case 0xf6: /* GRP3 */
3897
    case 0xf7:
3898
        if ((b & 1) == 0)
3899
            ot = OT_BYTE;
3900
        else
3901
            ot = dflag + OT_WORD;
3902

    
3903
        modrm = ldub_code(s->pc++);
3904
        mod = (modrm >> 6) & 3;
3905
        rm = (modrm & 7) | REX_B(s);
3906
        op = (modrm >> 3) & 7;
3907
        if (mod != 3) {
3908
            if (op == 0)
3909
                s->rip_offset = insn_const_size(ot);
3910
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3911
            gen_op_ld_T0_A0(ot + s->mem_index);
3912
        } else {
3913
            gen_op_mov_TN_reg(ot, 0, rm);
3914
        }
3915

    
3916
        switch(op) {
3917
        case 0: /* test */
3918
            val = insn_get(s, ot);
3919
            gen_op_movl_T1_im(val);
3920
            gen_op_testl_T0_T1_cc();
3921
            s->cc_op = CC_OP_LOGICB + ot;
3922
            break;
3923
        case 2: /* not */
3924
            tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
3925
            if (mod != 3) {
3926
                gen_op_st_T0_A0(ot + s->mem_index);
3927
            } else {
3928
                gen_op_mov_reg_T0(ot, rm);
3929
            }
3930
            break;
3931
        case 3: /* neg */
3932
            tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
3933
            if (mod != 3) {
3934
                gen_op_st_T0_A0(ot + s->mem_index);
3935
            } else {
3936
                gen_op_mov_reg_T0(ot, rm);
3937
            }
3938
            gen_op_update_neg_cc();
3939
            s->cc_op = CC_OP_SUBB + ot;
3940
            break;
3941
        case 4: /* mul */
3942
            switch(ot) {
3943
            case OT_BYTE:
3944
                gen_op_mulb_AL_T0();
3945
                s->cc_op = CC_OP_MULB;
3946
                break;
3947
            case OT_WORD:
3948
                gen_op_mulw_AX_T0();
3949
                s->cc_op = CC_OP_MULW;
3950
                break;
3951
            default:
3952
            case OT_LONG:
3953
                gen_op_mull_EAX_T0();
3954
                s->cc_op = CC_OP_MULL;
3955
                break;
3956
#ifdef TARGET_X86_64
3957
            case OT_QUAD:
3958
                gen_op_mulq_EAX_T0();
3959
                s->cc_op = CC_OP_MULQ;
3960
                break;
3961
#endif
3962
            }
3963
            break;
3964
        case 5: /* imul */
3965
            switch(ot) {
3966
            case OT_BYTE:
3967
                gen_op_imulb_AL_T0();
3968
                s->cc_op = CC_OP_MULB;
3969
                break;
3970
            case OT_WORD:
3971
                gen_op_imulw_AX_T0();
3972
                s->cc_op = CC_OP_MULW;
3973
                break;
3974
            default:
3975
            case OT_LONG:
3976
                gen_op_imull_EAX_T0();
3977
                s->cc_op = CC_OP_MULL;
3978
                break;
3979
#ifdef TARGET_X86_64
3980
            case OT_QUAD:
3981
                gen_op_imulq_EAX_T0();
3982
                s->cc_op = CC_OP_MULQ;
3983
                break;
3984
#endif
3985
            }
3986
            break;
3987
        case 6: /* div */
3988
            switch(ot) {
3989
            case OT_BYTE:
3990
                gen_jmp_im(pc_start - s->cs_base);
3991
                tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
3992
                break;
3993
            case OT_WORD:
3994
                gen_jmp_im(pc_start - s->cs_base);
3995
                tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
3996
                break;
3997
            default:
3998
            case OT_LONG:
3999
                gen_jmp_im(pc_start - s->cs_base);
4000
                tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
4001
                break;
4002
#ifdef TARGET_X86_64
4003
            case OT_QUAD:
4004
                gen_jmp_im(pc_start - s->cs_base);
4005
                tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
4006
                break;
4007
#endif
4008
            }
4009
            break;
4010
        case 7: /* idiv */
4011
            switch(ot) {
4012
            case OT_BYTE:
4013
                gen_jmp_im(pc_start - s->cs_base);
4014
                tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
4015
                break;
4016
            case OT_WORD:
4017
                gen_jmp_im(pc_start - s->cs_base);
4018
                tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
4019
                break;
4020
            default:
4021
            case OT_LONG:
4022
                gen_jmp_im(pc_start - s->cs_base);
4023
                tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
4024
                break;
4025
#ifdef TARGET_X86_64
4026
            case OT_QUAD:
4027
                gen_jmp_im(pc_start - s->cs_base);
4028
                tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
4029
                break;
4030
#endif
4031
            }
4032
            break;
4033
        default:
4034
            goto illegal_op;
4035
        }
4036
        break;
4037

    
4038
    case 0xfe: /* GRP4 */
4039
    case 0xff: /* GRP5 */
4040
        if ((b & 1) == 0)
4041
            ot = OT_BYTE;
4042
        else
4043
            ot = dflag + OT_WORD;
4044

    
4045
        modrm = ldub_code(s->pc++);
4046
        mod = (modrm >> 6) & 3;
4047
        rm = (modrm & 7) | REX_B(s);
4048
        op = (modrm >> 3) & 7;
4049
        if (op >= 2 && b == 0xfe) {
4050
            goto illegal_op;
4051
        }
4052
        if (CODE64(s)) {
4053
            if (op == 2 || op == 4) {
4054
                /* operand size for jumps is 64 bit */
4055
                ot = OT_QUAD;
4056
            } else if (op == 3 || op == 5) {
4057
                /* for call calls, the operand is 16 or 32 bit, even
4058
                   in long mode */
4059
                ot = dflag ? OT_LONG : OT_WORD;
4060
            } else if (op == 6) {
4061
                /* default push size is 64 bit */
4062
                ot = dflag ? OT_QUAD : OT_WORD;
4063
            }
4064
        }
4065
        if (mod != 3) {
4066
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4067
            if (op >= 2 && op != 3 && op != 5)
4068
                gen_op_ld_T0_A0(ot + s->mem_index);
4069
        } else {
4070
            gen_op_mov_TN_reg(ot, 0, rm);
4071
        }
4072

    
4073
        switch(op) {
4074
        case 0: /* inc Ev */
4075
            if (mod != 3)
4076
                opreg = OR_TMP0;
4077
            else
4078
                opreg = rm;
4079
            gen_inc(s, ot, opreg, 1);
4080
            break;
4081
        case 1: /* dec Ev */
4082
            if (mod != 3)
4083
                opreg = OR_TMP0;
4084
            else
4085
                opreg = rm;
4086
            gen_inc(s, ot, opreg, -1);
4087
            break;
4088
        case 2: /* call Ev */
4089
            /* XXX: optimize if memory (no 'and' is necessary) */
4090
            if (s->dflag == 0)
4091
                gen_op_andl_T0_ffff();
4092
            next_eip = s->pc - s->cs_base;
4093
            gen_movtl_T1_im(next_eip);
4094
            gen_push_T1(s);
4095
            gen_op_jmp_T0();
4096
            gen_eob(s);
4097
            break;
4098
        case 3: /* lcall Ev */
4099
            gen_op_ld_T1_A0(ot + s->mem_index);
4100
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4101
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4102
        do_lcall:
4103
            if (s->pe && !s->vm86) {
4104
                if (s->cc_op != CC_OP_DYNAMIC)
4105
                    gen_op_set_cc_op(s->cc_op);
4106
                gen_jmp_im(pc_start - s->cs_base);
4107
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4108
                tcg_gen_helper_0_4(helper_lcall_protected,
4109
                                   cpu_tmp2_i32, cpu_T[1],
4110
                                   tcg_const_i32(dflag), 
4111
                                   tcg_const_i32(s->pc - pc_start));
4112
            } else {
4113
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4114
                tcg_gen_helper_0_4(helper_lcall_real,
4115
                                   cpu_tmp2_i32, cpu_T[1],
4116
                                   tcg_const_i32(dflag), 
4117
                                   tcg_const_i32(s->pc - s->cs_base));
4118
            }
4119
            gen_eob(s);
4120
            break;
4121
        case 4: /* jmp Ev */
4122
            if (s->dflag == 0)
4123
                gen_op_andl_T0_ffff();
4124
            gen_op_jmp_T0();
4125
            gen_eob(s);
4126
            break;
4127
        case 5: /* ljmp Ev */
4128
            gen_op_ld_T1_A0(ot + s->mem_index);
4129
            gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4130
            gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4131
        do_ljmp:
4132
            if (s->pe && !s->vm86) {
4133
                if (s->cc_op != CC_OP_DYNAMIC)
4134
                    gen_op_set_cc_op(s->cc_op);
4135
                gen_jmp_im(pc_start - s->cs_base);
4136
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4137
                tcg_gen_helper_0_3(helper_ljmp_protected,
4138
                                   cpu_tmp2_i32,
4139
                                   cpu_T[1],
4140
                                   tcg_const_i32(s->pc - pc_start));
4141
            } else {
4142
                gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
4143
                gen_op_movl_T0_T1();
4144
                gen_op_jmp_T0();
4145
            }
4146
            gen_eob(s);
4147
            break;
4148
        case 6: /* push Ev */
4149
            gen_push_T0(s);
4150
            break;
4151
        default:
4152
            goto illegal_op;
4153
        }
4154
        break;
4155

    
4156
    case 0x84: /* test Ev, Gv */
4157
    case 0x85:
4158
        if ((b & 1) == 0)
4159
            ot = OT_BYTE;
4160
        else
4161
            ot = dflag + OT_WORD;
4162

    
4163
        modrm = ldub_code(s->pc++);
4164
        mod = (modrm >> 6) & 3;
4165
        rm = (modrm & 7) | REX_B(s);
4166
        reg = ((modrm >> 3) & 7) | rex_r;
4167

    
4168
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4169
        gen_op_mov_TN_reg(ot, 1, reg);
4170
        gen_op_testl_T0_T1_cc();
4171
        s->cc_op = CC_OP_LOGICB + ot;
4172
        break;
4173

    
4174
    case 0xa8: /* test eAX, Iv */
4175
    case 0xa9:
4176
        if ((b & 1) == 0)
4177
            ot = OT_BYTE;
4178
        else
4179
            ot = dflag + OT_WORD;
4180
        val = insn_get(s, ot);
4181

    
4182
        gen_op_mov_TN_reg(ot, 0, OR_EAX);
4183
        gen_op_movl_T1_im(val);
4184
        gen_op_testl_T0_T1_cc();
4185
        s->cc_op = CC_OP_LOGICB + ot;
4186
        break;
4187

    
4188
    case 0x98: /* CWDE/CBW */
4189
#ifdef TARGET_X86_64
4190
        if (dflag == 2) {
4191
            gen_op_movslq_RAX_EAX();
4192
        } else
4193
#endif
4194
        if (dflag == 1)
4195
            gen_op_movswl_EAX_AX();
4196
        else
4197
            gen_op_movsbw_AX_AL();
4198
        break;
4199
    case 0x99: /* CDQ/CWD */
4200
#ifdef TARGET_X86_64
4201
        if (dflag == 2) {
4202
            gen_op_movsqo_RDX_RAX();
4203
        } else
4204
#endif
4205
        if (dflag == 1)
4206
            gen_op_movslq_EDX_EAX();
4207
        else
4208
            gen_op_movswl_DX_AX();
4209
        break;
4210
    case 0x1af: /* imul Gv, Ev */
4211
    case 0x69: /* imul Gv, Ev, I */
4212
    case 0x6b:
4213
        ot = dflag + OT_WORD;
4214
        modrm = ldub_code(s->pc++);
4215
        reg = ((modrm >> 3) & 7) | rex_r;
4216
        if (b == 0x69)
4217
            s->rip_offset = insn_const_size(ot);
4218
        else if (b == 0x6b)
4219
            s->rip_offset = 1;
4220
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4221
        if (b == 0x69) {
4222
            val = insn_get(s, ot);
4223
            gen_op_movl_T1_im(val);
4224
        } else if (b == 0x6b) {
4225
            val = (int8_t)insn_get(s, OT_BYTE);
4226
            gen_op_movl_T1_im(val);
4227
        } else {
4228
            gen_op_mov_TN_reg(ot, 1, reg);
4229
        }
4230

    
4231
#ifdef TARGET_X86_64
4232
        if (ot == OT_QUAD) {
4233
            gen_op_imulq_T0_T1();
4234
        } else
4235
#endif
4236
        if (ot == OT_LONG) {
4237
            gen_op_imull_T0_T1();
4238
        } else {
4239
            gen_op_imulw_T0_T1();
4240
        }
4241
        gen_op_mov_reg_T0(ot, reg);
4242
        s->cc_op = CC_OP_MULB + ot;
4243
        break;
4244
    case 0x1c0:
4245
    case 0x1c1: /* xadd Ev, Gv */
4246
        if ((b & 1) == 0)
4247
            ot = OT_BYTE;
4248
        else
4249
            ot = dflag + OT_WORD;
4250
        modrm = ldub_code(s->pc++);
4251
        reg = ((modrm >> 3) & 7) | rex_r;
4252
        mod = (modrm >> 6) & 3;
4253
        if (mod == 3) {
4254
            rm = (modrm & 7) | REX_B(s);
4255
            gen_op_mov_TN_reg(ot, 0, reg);
4256
            gen_op_mov_TN_reg(ot, 1, rm);
4257
            gen_op_addl_T0_T1();
4258
            gen_op_mov_reg_T1(ot, reg);
4259
            gen_op_mov_reg_T0(ot, rm);
4260
        } else {
4261
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4262
            gen_op_mov_TN_reg(ot, 0, reg);
4263
            gen_op_ld_T1_A0(ot + s->mem_index);
4264
            gen_op_addl_T0_T1();
4265
            gen_op_st_T0_A0(ot + s->mem_index);
4266
            gen_op_mov_reg_T1(ot, reg);
4267
        }
4268
        gen_op_update2_cc();
4269
        s->cc_op = CC_OP_ADDB + ot;
4270
        break;
4271
    case 0x1b0:
4272
    case 0x1b1: /* cmpxchg Ev, Gv */
4273
        if ((b & 1) == 0)
4274
            ot = OT_BYTE;
4275
        else
4276
            ot = dflag + OT_WORD;
4277
        modrm = ldub_code(s->pc++);
4278
        reg = ((modrm >> 3) & 7) | rex_r;
4279
        mod = (modrm >> 6) & 3;
4280
        gen_op_mov_TN_reg(ot, 1, reg);
4281
        if (mod == 3) {
4282
            rm = (modrm & 7) | REX_B(s);
4283
            gen_op_mov_TN_reg(ot, 0, rm);
4284
            gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
4285
            gen_op_mov_reg_T0(ot, rm);
4286
        } else {
4287
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4288
            gen_op_ld_T0_A0(ot + s->mem_index);
4289
            gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
4290
        }
4291
        s->cc_op = CC_OP_SUBB + ot;
4292
        break;
4293
    case 0x1c7: /* cmpxchg8b */
4294
        modrm = ldub_code(s->pc++);
4295
        mod = (modrm >> 6) & 3;
4296
        if ((mod == 3) || ((modrm & 0x38) != 0x8))
4297
            goto illegal_op;
4298
        gen_jmp_im(pc_start - s->cs_base);
4299
        if (s->cc_op != CC_OP_DYNAMIC)
4300
            gen_op_set_cc_op(s->cc_op);
4301
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4302
        gen_op_cmpxchg8b();
4303
        s->cc_op = CC_OP_EFLAGS;
4304
        break;
4305

    
4306
        /**************************/
4307
        /* push/pop */
4308
    case 0x50 ... 0x57: /* push */
4309
        gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
4310
        gen_push_T0(s);
4311
        break;
4312
    case 0x58 ... 0x5f: /* pop */
4313
        if (CODE64(s)) {
4314
            ot = dflag ? OT_QUAD : OT_WORD;
4315
        } else {
4316
            ot = dflag + OT_WORD;
4317
        }
4318
        gen_pop_T0(s);
4319
        /* NOTE: order is important for pop %sp */
4320
        gen_pop_update(s);
4321
        gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
4322
        break;
4323
    case 0x60: /* pusha */
4324
        if (CODE64(s))
4325
            goto illegal_op;
4326
        gen_pusha(s);
4327
        break;
4328
    case 0x61: /* popa */
4329
        if (CODE64(s))
4330
            goto illegal_op;
4331
        gen_popa(s);
4332
        break;
4333
    case 0x68: /* push Iv */
4334
    case 0x6a:
4335
        if (CODE64(s)) {
4336
            ot = dflag ? OT_QUAD : OT_WORD;
4337
        } else {
4338
            ot = dflag + OT_WORD;
4339
        }
4340
        if (b == 0x68)
4341
            val = insn_get(s, ot);
4342
        else
4343
            val = (int8_t)insn_get(s, OT_BYTE);
4344
        gen_op_movl_T0_im(val);
4345
        gen_push_T0(s);
4346
        break;
4347
    case 0x8f: /* pop Ev */
4348
        if (CODE64(s)) {
4349
            ot = dflag ? OT_QUAD : OT_WORD;
4350
        } else {
4351
            ot = dflag + OT_WORD;
4352
        }
4353
        modrm = ldub_code(s->pc++);
4354
        mod = (modrm >> 6) & 3;
4355
        gen_pop_T0(s);
4356
        if (mod == 3) {
4357
            /* NOTE: order is important for pop %sp */
4358
            gen_pop_update(s);
4359
            rm = (modrm & 7) | REX_B(s);
4360
            gen_op_mov_reg_T0(ot, rm);
4361
        } else {
4362
            /* NOTE: order is important too for MMU exceptions */
4363
            s->popl_esp_hack = 1 << ot;
4364
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4365
            s->popl_esp_hack = 0;
4366
            gen_pop_update(s);
4367
        }
4368
        break;
4369
    case 0xc8: /* enter */
4370
        {
4371
            int level;
4372
            val = lduw_code(s->pc);
4373
            s->pc += 2;
4374
            level = ldub_code(s->pc++);
4375
            gen_enter(s, val, level);
4376
        }
4377
        break;
4378
    case 0xc9: /* leave */
4379
        /* XXX: exception not precise (ESP is updated before potential exception) */
4380
        if (CODE64(s)) {
4381
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
4382
            gen_op_mov_reg_T0(OT_QUAD, R_ESP);
4383
        } else if (s->ss32) {
4384
            gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
4385
            gen_op_mov_reg_T0(OT_LONG, R_ESP);
4386
        } else {
4387
            gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
4388
            gen_op_mov_reg_T0(OT_WORD, R_ESP);
4389
        }
4390
        gen_pop_T0(s);
4391
        if (CODE64(s)) {
4392
            ot = dflag ? OT_QUAD : OT_WORD;
4393
        } else {
4394
            ot = dflag + OT_WORD;
4395
        }
4396
        gen_op_mov_reg_T0(ot, R_EBP);
4397
        gen_pop_update(s);
4398
        break;
4399
    case 0x06: /* push es */
4400
    case 0x0e: /* push cs */
4401
    case 0x16: /* push ss */
4402
    case 0x1e: /* push ds */
4403
        if (CODE64(s))
4404
            goto illegal_op;
4405
        gen_op_movl_T0_seg(b >> 3);
4406
        gen_push_T0(s);
4407
        break;
4408
    case 0x1a0: /* push fs */
4409
    case 0x1a8: /* push gs */
4410
        gen_op_movl_T0_seg((b >> 3) & 7);
4411
        gen_push_T0(s);
4412
        break;
4413
    case 0x07: /* pop es */
4414
    case 0x17: /* pop ss */
4415
    case 0x1f: /* pop ds */
4416
        if (CODE64(s))
4417
            goto illegal_op;
4418
        reg = b >> 3;
4419
        gen_pop_T0(s);
4420
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4421
        gen_pop_update(s);
4422
        if (reg == R_SS) {
4423
            /* if reg == SS, inhibit interrupts/trace. */
4424
            /* If several instructions disable interrupts, only the
4425
               _first_ does it */
4426
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4427
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
4428
            s->tf = 0;
4429
        }
4430
        if (s->is_jmp) {
4431
            gen_jmp_im(s->pc - s->cs_base);
4432
            gen_eob(s);
4433
        }
4434
        break;
4435
    case 0x1a1: /* pop fs */
4436
    case 0x1a9: /* pop gs */
4437
        gen_pop_T0(s);
4438
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
4439
        gen_pop_update(s);
4440
        if (s->is_jmp) {
4441
            gen_jmp_im(s->pc - s->cs_base);
4442
            gen_eob(s);
4443
        }
4444
        break;
4445

    
4446
        /**************************/
4447
        /* mov */
4448
    case 0x88:
4449
    case 0x89: /* mov Gv, Ev */
4450
        if ((b & 1) == 0)
4451
            ot = OT_BYTE;
4452
        else
4453
            ot = dflag + OT_WORD;
4454
        modrm = ldub_code(s->pc++);
4455
        reg = ((modrm >> 3) & 7) | rex_r;
4456

    
4457
        /* generate a generic store */
4458
        gen_ldst_modrm(s, modrm, ot, reg, 1);
4459
        break;
4460
    case 0xc6:
4461
    case 0xc7: /* mov Ev, Iv */
4462
        if ((b & 1) == 0)
4463
            ot = OT_BYTE;
4464
        else
4465
            ot = dflag + OT_WORD;
4466
        modrm = ldub_code(s->pc++);
4467
        mod = (modrm >> 6) & 3;
4468
        if (mod != 3) {
4469
            s->rip_offset = insn_const_size(ot);
4470
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4471
        }
4472
        val = insn_get(s, ot);
4473
        gen_op_movl_T0_im(val);
4474
        if (mod != 3)
4475
            gen_op_st_T0_A0(ot + s->mem_index);
4476
        else
4477
            gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
4478
        break;
4479
    case 0x8a:
4480
    case 0x8b: /* mov Ev, Gv */
4481
        if ((b & 1) == 0)
4482
            ot = OT_BYTE;
4483
        else
4484
            ot = OT_WORD + dflag;
4485
        modrm = ldub_code(s->pc++);
4486
        reg = ((modrm >> 3) & 7) | rex_r;
4487

    
4488
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4489
        gen_op_mov_reg_T0(ot, reg);
4490
        break;
4491
    case 0x8e: /* mov seg, Gv */
4492
        modrm = ldub_code(s->pc++);
4493
        reg = (modrm >> 3) & 7;
4494
        if (reg >= 6 || reg == R_CS)
4495
            goto illegal_op;
4496
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4497
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4498
        if (reg == R_SS) {
4499
            /* if reg == SS, inhibit interrupts/trace */
4500
            /* If several instructions disable interrupts, only the
4501
               _first_ does it */
4502
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4503
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
4504
            s->tf = 0;
4505
        }
4506
        if (s->is_jmp) {
4507
            gen_jmp_im(s->pc - s->cs_base);
4508
            gen_eob(s);
4509
        }
4510
        break;
4511
    case 0x8c: /* mov Gv, seg */
4512
        modrm = ldub_code(s->pc++);
4513
        reg = (modrm >> 3) & 7;
4514
        mod = (modrm >> 6) & 3;
4515
        if (reg >= 6)
4516
            goto illegal_op;
4517
        gen_op_movl_T0_seg(reg);
4518
        if (mod == 3)
4519
            ot = OT_WORD + dflag;
4520
        else
4521
            ot = OT_WORD;
4522
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4523
        break;
4524

    
4525
    case 0x1b6: /* movzbS Gv, Eb */
4526
    case 0x1b7: /* movzwS Gv, Eb */
4527
    case 0x1be: /* movsbS Gv, Eb */
4528
    case 0x1bf: /* movswS Gv, Eb */
4529
        {
4530
            int d_ot;
4531
            /* d_ot is the size of destination */
4532
            d_ot = dflag + OT_WORD;
4533
            /* ot is the size of source */
4534
            ot = (b & 1) + OT_BYTE;
4535
            modrm = ldub_code(s->pc++);
4536
            reg = ((modrm >> 3) & 7) | rex_r;
4537
            mod = (modrm >> 6) & 3;
4538
            rm = (modrm & 7) | REX_B(s);
4539

    
4540
            if (mod == 3) {
4541
                gen_op_mov_TN_reg(ot, 0, rm);
4542
                switch(ot | (b & 8)) {
4543
                case OT_BYTE:
4544
                    gen_op_movzbl_T0_T0();
4545
                    break;
4546
                case OT_BYTE | 8:
4547
                    gen_op_movsbl_T0_T0();
4548
                    break;
4549
                case OT_WORD:
4550
                    gen_op_movzwl_T0_T0();
4551
                    break;
4552
                default:
4553
                case OT_WORD | 8:
4554
                    gen_op_movswl_T0_T0();
4555
                    break;
4556
                }
4557
                gen_op_mov_reg_T0(d_ot, reg);
4558
            } else {
4559
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4560
                if (b & 8) {
4561
                    gen_op_lds_T0_A0(ot + s->mem_index);
4562
                } else {
4563
                    gen_op_ldu_T0_A0(ot + s->mem_index);
4564
                }
4565
                gen_op_mov_reg_T0(d_ot, reg);
4566
            }
4567
        }
4568
        break;
4569

    
4570
    case 0x8d: /* lea */
4571
        ot = dflag + OT_WORD;
4572
        modrm = ldub_code(s->pc++);
4573
        mod = (modrm >> 6) & 3;
4574
        if (mod == 3)
4575
            goto illegal_op;
4576
        reg = ((modrm >> 3) & 7) | rex_r;
4577
        /* we must ensure that no segment is added */
4578
        s->override = -1;
4579
        val = s->addseg;
4580
        s->addseg = 0;
4581
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4582
        s->addseg = val;
4583
        gen_op_mov_reg_A0(ot - OT_WORD, reg);
4584
        break;
4585

    
4586
    case 0xa0: /* mov EAX, Ov */
4587
    case 0xa1:
4588
    case 0xa2: /* mov Ov, EAX */
4589
    case 0xa3:
4590
        {
4591
            target_ulong offset_addr;
4592

    
4593
            if ((b & 1) == 0)
4594
                ot = OT_BYTE;
4595
            else
4596
                ot = dflag + OT_WORD;
4597
#ifdef TARGET_X86_64
4598
            if (s->aflag == 2) {
4599
                offset_addr = ldq_code(s->pc);
4600
                s->pc += 8;
4601
                gen_op_movq_A0_im(offset_addr);
4602
            } else
4603
#endif
4604
            {
4605
                if (s->aflag) {
4606
                    offset_addr = insn_get(s, OT_LONG);
4607
                } else {
4608
                    offset_addr = insn_get(s, OT_WORD);
4609
                }
4610
                gen_op_movl_A0_im(offset_addr);
4611
            }
4612
            gen_add_A0_ds_seg(s);
4613
            if ((b & 2) == 0) {
4614
                gen_op_ld_T0_A0(ot + s->mem_index);
4615
                gen_op_mov_reg_T0(ot, R_EAX);
4616
            } else {
4617
                gen_op_mov_TN_reg(ot, 0, R_EAX);
4618
                gen_op_st_T0_A0(ot + s->mem_index);
4619
            }
4620
        }
4621
        break;
4622
    case 0xd7: /* xlat */
4623
#ifdef TARGET_X86_64
4624
        if (s->aflag == 2) {
4625
            gen_op_movq_A0_reg(R_EBX);
4626
            gen_op_addq_A0_AL();
4627
        } else
4628
#endif
4629
        {
4630
            gen_op_movl_A0_reg(R_EBX);
4631
            gen_op_addl_A0_AL();
4632
            if (s->aflag == 0)
4633
                gen_op_andl_A0_ffff();
4634
        }
4635
        gen_add_A0_ds_seg(s);
4636
        gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
4637
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
4638
        break;
4639
    case 0xb0 ... 0xb7: /* mov R, Ib */
4640
        val = insn_get(s, OT_BYTE);
4641
        gen_op_movl_T0_im(val);
4642
        gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
4643
        break;
4644
    case 0xb8 ... 0xbf: /* mov R, Iv */
4645
#ifdef TARGET_X86_64
4646
        if (dflag == 2) {
4647
            uint64_t tmp;
4648
            /* 64 bit case */
4649
            tmp = ldq_code(s->pc);
4650
            s->pc += 8;
4651
            reg = (b & 7) | REX_B(s);
4652
            gen_movtl_T0_im(tmp);
4653
            gen_op_mov_reg_T0(OT_QUAD, reg);
4654
        } else
4655
#endif
4656
        {
4657
            ot = dflag ? OT_LONG : OT_WORD;
4658
            val = insn_get(s, ot);
4659
            reg = (b & 7) | REX_B(s);
4660
            gen_op_movl_T0_im(val);
4661
            gen_op_mov_reg_T0(ot, reg);
4662
        }
4663
        break;
4664

    
4665
    case 0x91 ... 0x97: /* xchg R, EAX */
4666
        ot = dflag + OT_WORD;
4667
        reg = (b & 7) | REX_B(s);
4668
        rm = R_EAX;
4669
        goto do_xchg_reg;
4670
    case 0x86:
4671
    case 0x87: /* xchg Ev, Gv */
4672
        if ((b & 1) == 0)
4673
            ot = OT_BYTE;
4674
        else
4675
            ot = dflag + OT_WORD;
4676
        modrm = ldub_code(s->pc++);
4677
        reg = ((modrm >> 3) & 7) | rex_r;
4678
        mod = (modrm >> 6) & 3;
4679
        if (mod == 3) {
4680
            rm = (modrm & 7) | REX_B(s);
4681
        do_xchg_reg:
4682
            gen_op_mov_TN_reg(ot, 0, reg);
4683
            gen_op_mov_TN_reg(ot, 1, rm);
4684
            gen_op_mov_reg_T0(ot, rm);
4685
            gen_op_mov_reg_T1(ot, reg);
4686
        } else {
4687
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4688
            gen_op_mov_TN_reg(ot, 0, reg);
4689
            /* for xchg, lock is implicit */
4690
            if (!(prefixes & PREFIX_LOCK))
4691
                tcg_gen_helper_0_0(helper_lock);
4692
            gen_op_ld_T1_A0(ot + s->mem_index);
4693
            gen_op_st_T0_A0(ot + s->mem_index);
4694
            if (!(prefixes & PREFIX_LOCK))
4695
                tcg_gen_helper_0_0(helper_unlock);
4696
            gen_op_mov_reg_T1(ot, reg);
4697
        }
4698
        break;
4699
    case 0xc4: /* les Gv */
4700
        if (CODE64(s))
4701
            goto illegal_op;
4702
        op = R_ES;
4703
        goto do_lxx;
4704
    case 0xc5: /* lds Gv */
4705
        if (CODE64(s))
4706
            goto illegal_op;
4707
        op = R_DS;
4708
        goto do_lxx;
4709
    case 0x1b2: /* lss Gv */
4710
        op = R_SS;
4711
        goto do_lxx;
4712
    case 0x1b4: /* lfs Gv */
4713
        op = R_FS;
4714
        goto do_lxx;
4715
    case 0x1b5: /* lgs Gv */
4716
        op = R_GS;
4717
    do_lxx:
4718
        ot = dflag ? OT_LONG : OT_WORD;
4719
        modrm = ldub_code(s->pc++);
4720
        reg = ((modrm >> 3) & 7) | rex_r;
4721
        mod = (modrm >> 6) & 3;
4722
        if (mod == 3)
4723
            goto illegal_op;
4724
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4725
        gen_op_ld_T1_A0(ot + s->mem_index);
4726
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4727
        /* load the segment first to handle exceptions properly */
4728
        gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
4729
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4730
        /* then put the data */
4731
        gen_op_mov_reg_T1(ot, reg);
4732
        if (s->is_jmp) {
4733
            gen_jmp_im(s->pc - s->cs_base);
4734
            gen_eob(s);
4735
        }
4736
        break;
4737

    
4738
        /************************/
4739
        /* shifts */
4740
    case 0xc0:
4741
    case 0xc1:
4742
        /* shift Ev,Ib */
4743
        shift = 2;
4744
    grp2:
4745
        {
4746
            if ((b & 1) == 0)
4747
                ot = OT_BYTE;
4748
            else
4749
                ot = dflag + OT_WORD;
4750

    
4751
            modrm = ldub_code(s->pc++);
4752
            mod = (modrm >> 6) & 3;
4753
            op = (modrm >> 3) & 7;
4754

    
4755
            if (mod != 3) {
4756
                if (shift == 2) {
4757
                    s->rip_offset = 1;
4758
                }
4759
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4760
                opreg = OR_TMP0;
4761
            } else {
4762
                opreg = (modrm & 7) | REX_B(s);
4763
            }
4764

    
4765
            /* simpler op */
4766
            if (shift == 0) {
4767
                gen_shift(s, op, ot, opreg, OR_ECX);
4768
            } else {
4769
                if (shift == 2) {
4770
                    shift = ldub_code(s->pc++);
4771
                }
4772
                gen_shifti(s, op, ot, opreg, shift);
4773
            }
4774
        }
4775
        break;
4776
    case 0xd0:
4777
    case 0xd1:
4778
        /* shift Ev,1 */
4779
        shift = 1;
4780
        goto grp2;
4781
    case 0xd2:
4782
    case 0xd3:
4783
        /* shift Ev,cl */
4784
        shift = 0;
4785
        goto grp2;
4786

    
4787
    case 0x1a4: /* shld imm */
4788
        op = 0;
4789
        shift = 1;
4790
        goto do_shiftd;
4791
    case 0x1a5: /* shld cl */
4792
        op = 0;
4793
        shift = 0;
4794
        goto do_shiftd;
4795
    case 0x1ac: /* shrd imm */
4796
        op = 1;
4797
        shift = 1;
4798
        goto do_shiftd;
4799
    case 0x1ad: /* shrd cl */
4800
        op = 1;
4801
        shift = 0;
4802
    do_shiftd:
4803
        ot = dflag + OT_WORD;
4804
        modrm = ldub_code(s->pc++);
4805
        mod = (modrm >> 6) & 3;
4806
        rm = (modrm & 7) | REX_B(s);
4807
        reg = ((modrm >> 3) & 7) | rex_r;
4808
        if (mod != 3) {
4809
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4810
            opreg = OR_TMP0;
4811
        } else {
4812
            opreg = rm;
4813
        }
4814
        gen_op_mov_TN_reg(ot, 1, reg);
4815

    
4816
        if (shift) {
4817
            val = ldub_code(s->pc++);
4818
            tcg_gen_movi_tl(cpu_T3, val);
4819
        } else {
4820
            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
4821
        }
4822
        gen_shiftd_rm_T1_T3(s, ot, opreg, op);
4823
        break;
4824

    
4825
        /************************/
4826
        /* floats */
4827
    case 0xd8 ... 0xdf:
4828
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4829
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4830
            /* XXX: what to do if illegal op ? */
4831
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4832
            break;
4833
        }
4834
        modrm = ldub_code(s->pc++);
4835
        mod = (modrm >> 6) & 3;
4836
        rm = modrm & 7;
4837
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4838
        if (mod != 3) {
4839
            /* memory op */
4840
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4841
            switch(op) {
4842
            case 0x00 ... 0x07: /* fxxxs */
4843
            case 0x10 ... 0x17: /* fixxxl */
4844
            case 0x20 ... 0x27: /* fxxxl */
4845
            case 0x30 ... 0x37: /* fixxx */
4846
                {
4847
                    int op1;
4848
                    op1 = op & 7;
4849

    
4850
                    switch(op >> 4) {
4851
                    case 0:
4852
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4853
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4854
                        tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
4855
                        break;
4856
                    case 1:
4857
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4858
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4859
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4860
                        break;
4861
                    case 2:
4862
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
4863
                                          (s->mem_index >> 2) - 1);
4864
                        tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
4865
                        break;
4866
                    case 3:
4867
                    default:
4868
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4869
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4870
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
4871
                        break;
4872
                    }
4873

    
4874
                    tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
4875
                    if (op1 == 3) {
4876
                        /* fcomp needs pop */
4877
                        tcg_gen_helper_0_0(helper_fpop);
4878
                    }
4879
                }
4880
                break;
4881
            case 0x08: /* flds */
4882
            case 0x0a: /* fsts */
4883
            case 0x0b: /* fstps */
4884
            case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4885
            case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4886
            case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4887
                switch(op & 7) {
4888
                case 0:
4889
                    switch(op >> 4) {
4890
                    case 0:
4891
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4892
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4893
                        tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
4894
                        break;
4895
                    case 1:
4896
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4897
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4898
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
4899
                        break;
4900
                    case 2:
4901
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
4902
                                          (s->mem_index >> 2) - 1);
4903
                        tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
4904
                        break;
4905
                    case 3:
4906
                    default:
4907
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
4908
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4909
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
4910
                        break;
4911
                    }
4912
                    break;
4913
                case 1:
4914
                    /* XXX: the corresponding CPUID bit must be tested ! */
4915
                    switch(op >> 4) {
4916
                    case 1:
4917
                        tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
4918
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4919
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4920
                        break;
4921
                    case 2:
4922
                        tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
4923
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
4924
                                          (s->mem_index >> 2) - 1);
4925
                        break;
4926
                    case 3:
4927
                    default:
4928
                        tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
4929
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4930
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
4931
                        break;
4932
                    }
4933
                    tcg_gen_helper_0_0(helper_fpop);
4934
                    break;
4935
                default:
4936
                    switch(op >> 4) {
4937
                    case 0:
4938
                        tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
4939
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4940
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4941
                        break;
4942
                    case 1:
4943
                        tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
4944
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4945
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
4946
                        break;
4947
                    case 2:
4948
                        tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
4949
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
4950
                                          (s->mem_index >> 2) - 1);
4951
                        break;
4952
                    case 3:
4953
                    default:
4954
                        tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
4955
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4956
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
4957
                        break;
4958
                    }
4959
                    if ((op & 7) == 3)
4960
                        tcg_gen_helper_0_0(helper_fpop);
4961
                    break;
4962
                }
4963
                break;
4964
            case 0x0c: /* fldenv mem */
4965
                if (s->cc_op != CC_OP_DYNAMIC)
4966
                    gen_op_set_cc_op(s->cc_op);
4967
                gen_jmp_im(pc_start - s->cs_base);
4968
                tcg_gen_helper_0_2(helper_fldenv, 
4969
                                   cpu_A0, tcg_const_i32(s->dflag));
4970
                break;
4971
            case 0x0d: /* fldcw mem */
4972
                gen_op_ld_T0_A0(OT_WORD + s->mem_index);
4973
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4974
                tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
4975
                break;
4976
            case 0x0e: /* fnstenv mem */
4977
                if (s->cc_op != CC_OP_DYNAMIC)
4978
                    gen_op_set_cc_op(s->cc_op);
4979
                gen_jmp_im(pc_start - s->cs_base);
4980
                tcg_gen_helper_0_2(helper_fstenv,
4981
                                   cpu_A0, tcg_const_i32(s->dflag));
4982
                break;
4983
            case 0x0f: /* fnstcw mem */
4984
                tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
4985
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4986
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
4987
                break;
4988
            case 0x1d: /* fldt mem */
4989
                if (s->cc_op != CC_OP_DYNAMIC)
4990
                    gen_op_set_cc_op(s->cc_op);
4991
                gen_jmp_im(pc_start - s->cs_base);
4992
                tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
4993
                break;
4994
            case 0x1f: /* fstpt mem */
4995
                if (s->cc_op != CC_OP_DYNAMIC)
4996
                    gen_op_set_cc_op(s->cc_op);
4997
                gen_jmp_im(pc_start - s->cs_base);
4998
                tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
4999
                tcg_gen_helper_0_0(helper_fpop);
5000
                break;
5001
            case 0x2c: /* frstor mem */
5002
                if (s->cc_op != CC_OP_DYNAMIC)
5003
                    gen_op_set_cc_op(s->cc_op);
5004
                gen_jmp_im(pc_start - s->cs_base);
5005
                tcg_gen_helper_0_2(helper_frstor,
5006
                                   cpu_A0, tcg_const_i32(s->dflag));
5007
                break;
5008
            case 0x2e: /* fnsave mem */
5009
                if (s->cc_op != CC_OP_DYNAMIC)
5010
                    gen_op_set_cc_op(s->cc_op);
5011
                gen_jmp_im(pc_start - s->cs_base);
5012
                tcg_gen_helper_0_2(helper_fsave,
5013
                                   cpu_A0, tcg_const_i32(s->dflag));
5014
                break;
5015
            case 0x2f: /* fnstsw mem */
5016
                tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5017
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5018
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
5019
                break;
5020
            case 0x3c: /* fbld */
5021
                if (s->cc_op != CC_OP_DYNAMIC)
5022
                    gen_op_set_cc_op(s->cc_op);
5023
                gen_jmp_im(pc_start - s->cs_base);
5024
                tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
5025
                break;
5026
            case 0x3e: /* fbstp */
5027
                if (s->cc_op != CC_OP_DYNAMIC)
5028
                    gen_op_set_cc_op(s->cc_op);
5029
                gen_jmp_im(pc_start - s->cs_base);
5030
                tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
5031
                tcg_gen_helper_0_0(helper_fpop);
5032
                break;
5033
            case 0x3d: /* fildll */
5034
                tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 
5035
                                  (s->mem_index >> 2) - 1);
5036
                tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
5037
                break;
5038
            case 0x3f: /* fistpll */
5039
                tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
5040
                tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 
5041
                                  (s->mem_index >> 2) - 1);
5042
                tcg_gen_helper_0_0(helper_fpop);
5043
                break;
5044
            default:
5045
                goto illegal_op;
5046
            }
5047
        } else {
5048
            /* register float ops */
5049
            opreg = rm;
5050

    
5051
            switch(op) {
5052
            case 0x08: /* fld sti */
5053
                tcg_gen_helper_0_0(helper_fpush);
5054
                tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
5055
                break;
5056
            case 0x09: /* fxchg sti */
5057
            case 0x29: /* fxchg4 sti, undocumented op */
5058
            case 0x39: /* fxchg7 sti, undocumented op */
5059
                tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
5060
                break;
5061
            case 0x0a: /* grp d9/2 */
5062
                switch(rm) {
5063
                case 0: /* fnop */
5064
                    /* check exceptions (FreeBSD FPU probe) */
5065
                    if (s->cc_op != CC_OP_DYNAMIC)
5066
                        gen_op_set_cc_op(s->cc_op);
5067
                    gen_jmp_im(pc_start - s->cs_base);
5068
                    tcg_gen_helper_0_0(helper_fwait);
5069
                    break;
5070
                default:
5071
                    goto illegal_op;
5072
                }
5073
                break;
5074
            case 0x0c: /* grp d9/4 */
5075
                switch(rm) {
5076
                case 0: /* fchs */
5077
                    tcg_gen_helper_0_0(helper_fchs_ST0);
5078
                    break;
5079
                case 1: /* fabs */
5080
                    tcg_gen_helper_0_0(helper_fabs_ST0);
5081
                    break;
5082
                case 4: /* ftst */
5083
                    tcg_gen_helper_0_0(helper_fldz_FT0);
5084
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5085
                    break;
5086
                case 5: /* fxam */
5087
                    tcg_gen_helper_0_0(helper_fxam_ST0);
5088
                    break;
5089
                default:
5090
                    goto illegal_op;
5091
                }
5092
                break;
5093
            case 0x0d: /* grp d9/5 */
5094
                {
5095
                    switch(rm) {
5096
                    case 0:
5097
                        tcg_gen_helper_0_0(helper_fpush);
5098
                        tcg_gen_helper_0_0(helper_fld1_ST0);
5099
                        break;
5100
                    case 1:
5101
                        tcg_gen_helper_0_0(helper_fpush);
5102
                        tcg_gen_helper_0_0(helper_fldl2t_ST0);
5103
                        break;
5104
                    case 2:
5105
                        tcg_gen_helper_0_0(helper_fpush);
5106
                        tcg_gen_helper_0_0(helper_fldl2e_ST0);
5107
                        break;
5108
                    case 3:
5109
                        tcg_gen_helper_0_0(helper_fpush);
5110
                        tcg_gen_helper_0_0(helper_fldpi_ST0);
5111
                        break;
5112
                    case 4:
5113
                        tcg_gen_helper_0_0(helper_fpush);
5114
                        tcg_gen_helper_0_0(helper_fldlg2_ST0);
5115
                        break;
5116
                    case 5:
5117
                        tcg_gen_helper_0_0(helper_fpush);
5118
                        tcg_gen_helper_0_0(helper_fldln2_ST0);
5119
                        break;
5120
                    case 6:
5121
                        tcg_gen_helper_0_0(helper_fpush);
5122
                        tcg_gen_helper_0_0(helper_fldz_ST0);
5123
                        break;
5124
                    default:
5125
                        goto illegal_op;
5126
                    }
5127
                }
5128
                break;
5129
            case 0x0e: /* grp d9/6 */
5130
                switch(rm) {
5131
                case 0: /* f2xm1 */
5132
                    tcg_gen_helper_0_0(helper_f2xm1);
5133
                    break;
5134
                case 1: /* fyl2x */
5135
                    tcg_gen_helper_0_0(helper_fyl2x);
5136
                    break;
5137
                case 2: /* fptan */
5138
                    tcg_gen_helper_0_0(helper_fptan);
5139
                    break;
5140
                case 3: /* fpatan */
5141
                    tcg_gen_helper_0_0(helper_fpatan);
5142
                    break;
5143
                case 4: /* fxtract */
5144
                    tcg_gen_helper_0_0(helper_fxtract);
5145
                    break;
5146
                case 5: /* fprem1 */
5147
                    tcg_gen_helper_0_0(helper_fprem1);
5148
                    break;
5149
                case 6: /* fdecstp */
5150
                    tcg_gen_helper_0_0(helper_fdecstp);
5151
                    break;
5152
                default:
5153
                case 7: /* fincstp */
5154
                    tcg_gen_helper_0_0(helper_fincstp);
5155
                    break;
5156
                }
5157
                break;
5158
            case 0x0f: /* grp d9/7 */
5159
                switch(rm) {
5160
                case 0: /* fprem */
5161
                    tcg_gen_helper_0_0(helper_fprem);
5162
                    break;
5163
                case 1: /* fyl2xp1 */
5164
                    tcg_gen_helper_0_0(helper_fyl2xp1);
5165
                    break;
5166
                case 2: /* fsqrt */
5167
                    tcg_gen_helper_0_0(helper_fsqrt);
5168
                    break;
5169
                case 3: /* fsincos */
5170
                    tcg_gen_helper_0_0(helper_fsincos);
5171
                    break;
5172
                case 5: /* fscale */
5173
                    tcg_gen_helper_0_0(helper_fscale);
5174
                    break;
5175
                case 4: /* frndint */
5176
                    tcg_gen_helper_0_0(helper_frndint);
5177
                    break;
5178
                case 6: /* fsin */
5179
                    tcg_gen_helper_0_0(helper_fsin);
5180
                    break;
5181
                default:
5182
                case 7: /* fcos */
5183
                    tcg_gen_helper_0_0(helper_fcos);
5184
                    break;
5185
                }
5186
                break;
5187
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
5188
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
5189
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
5190
                {
5191
                    int op1;
5192

    
5193
                    op1 = op & 7;
5194
                    if (op >= 0x20) {
5195
                        tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
5196
                        if (op >= 0x30)
5197
                            tcg_gen_helper_0_0(helper_fpop);
5198
                    } else {
5199
                        tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5200
                        tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
5201
                    }
5202
                }
5203
                break;
5204
            case 0x02: /* fcom */
5205
            case 0x22: /* fcom2, undocumented op */
5206
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5207
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5208
                break;
5209
            case 0x03: /* fcomp */
5210
            case 0x23: /* fcomp3, undocumented op */
5211
            case 0x32: /* fcomp5, undocumented op */
5212
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5213
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5214
                tcg_gen_helper_0_0(helper_fpop);
5215
                break;
5216
            case 0x15: /* da/5 */
5217
                switch(rm) {
5218
                case 1: /* fucompp */
5219
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5220
                    tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5221
                    tcg_gen_helper_0_0(helper_fpop);
5222
                    tcg_gen_helper_0_0(helper_fpop);
5223
                    break;
5224
                default:
5225
                    goto illegal_op;
5226
                }
5227
                break;
5228
            case 0x1c:
5229
                switch(rm) {
5230
                case 0: /* feni (287 only, just do nop here) */
5231
                    break;
5232
                case 1: /* fdisi (287 only, just do nop here) */
5233
                    break;
5234
                case 2: /* fclex */
5235
                    tcg_gen_helper_0_0(helper_fclex);
5236
                    break;
5237
                case 3: /* fninit */
5238
                    tcg_gen_helper_0_0(helper_fninit);
5239
                    break;
5240
                case 4: /* fsetpm (287 only, just do nop here) */
5241
                    break;
5242
                default:
5243
                    goto illegal_op;
5244
                }
5245
                break;
5246
            case 0x1d: /* fucomi */
5247
                if (s->cc_op != CC_OP_DYNAMIC)
5248
                    gen_op_set_cc_op(s->cc_op);
5249
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5250
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5251
                gen_op_fcomi_dummy();
5252
                s->cc_op = CC_OP_EFLAGS;
5253
                break;
5254
            case 0x1e: /* fcomi */
5255
                if (s->cc_op != CC_OP_DYNAMIC)
5256
                    gen_op_set_cc_op(s->cc_op);
5257
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5258
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5259
                gen_op_fcomi_dummy();
5260
                s->cc_op = CC_OP_EFLAGS;
5261
                break;
5262
            case 0x28: /* ffree sti */
5263
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5264
                break;
5265
            case 0x2a: /* fst sti */
5266
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5267
                break;
5268
            case 0x2b: /* fstp sti */
5269
            case 0x0b: /* fstp1 sti, undocumented op */
5270
            case 0x3a: /* fstp8 sti, undocumented op */
5271
            case 0x3b: /* fstp9 sti, undocumented op */
5272
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
5273
                tcg_gen_helper_0_0(helper_fpop);
5274
                break;
5275
            case 0x2c: /* fucom st(i) */
5276
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5277
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5278
                break;
5279
            case 0x2d: /* fucomp st(i) */
5280
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5281
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
5282
                tcg_gen_helper_0_0(helper_fpop);
5283
                break;
5284
            case 0x33: /* de/3 */
5285
                switch(rm) {
5286
                case 1: /* fcompp */
5287
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
5288
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
5289
                    tcg_gen_helper_0_0(helper_fpop);
5290
                    tcg_gen_helper_0_0(helper_fpop);
5291
                    break;
5292
                default:
5293
                    goto illegal_op;
5294
                }
5295
                break;
5296
            case 0x38: /* ffreep sti, undocumented op */
5297
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
5298
                tcg_gen_helper_0_0(helper_fpop);
5299
                break;
5300
            case 0x3c: /* df/4 */
5301
                switch(rm) {
5302
                case 0:
5303
                    tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
5304
                    tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
5305
                    gen_op_mov_reg_T0(OT_WORD, R_EAX);
5306
                    break;
5307
                default:
5308
                    goto illegal_op;
5309
                }
5310
                break;
5311
            case 0x3d: /* fucomip */
5312
                if (s->cc_op != CC_OP_DYNAMIC)
5313
                    gen_op_set_cc_op(s->cc_op);
5314
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5315
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
5316
                tcg_gen_helper_0_0(helper_fpop);
5317
                gen_op_fcomi_dummy();
5318
                s->cc_op = CC_OP_EFLAGS;
5319
                break;
5320
            case 0x3e: /* fcomip */
5321
                if (s->cc_op != CC_OP_DYNAMIC)
5322
                    gen_op_set_cc_op(s->cc_op);
5323
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
5324
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
5325
                tcg_gen_helper_0_0(helper_fpop);
5326
                gen_op_fcomi_dummy();
5327
                s->cc_op = CC_OP_EFLAGS;
5328
                break;
5329
            case 0x10 ... 0x13: /* fcmovxx */
5330
            case 0x18 ... 0x1b:
5331
                {
5332
                    int op1, l1;
5333
                    const static uint8_t fcmov_cc[8] = {
5334
                        (JCC_B << 1),
5335
                        (JCC_Z << 1),
5336
                        (JCC_BE << 1),
5337
                        (JCC_P << 1),
5338
                    };
5339
                    op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
5340
                    gen_setcc(s, op1);
5341
                    l1 = gen_new_label();
5342
                    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[0], tcg_const_tl(0), l1);
5343
                    tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
5344
                    gen_set_label(l1);
5345
                }
5346
                break;
5347
            default:
5348
                goto illegal_op;
5349
            }
5350
        }
5351
        break;
5352
        /************************/
5353
        /* string ops */
5354

    
5355
    case 0xa4: /* movsS */
5356
    case 0xa5:
5357
        if ((b & 1) == 0)
5358
            ot = OT_BYTE;
5359
        else
5360
            ot = dflag + OT_WORD;
5361

    
5362
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5363
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5364
        } else {
5365
            gen_movs(s, ot);
5366
        }
5367
        break;
5368

    
5369
    case 0xaa: /* stosS */
5370
    case 0xab:
5371
        if ((b & 1) == 0)
5372
            ot = OT_BYTE;
5373
        else
5374
            ot = dflag + OT_WORD;
5375

    
5376
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5377
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5378
        } else {
5379
            gen_stos(s, ot);
5380
        }
5381
        break;
5382
    case 0xac: /* lodsS */
5383
    case 0xad:
5384
        if ((b & 1) == 0)
5385
            ot = OT_BYTE;
5386
        else
5387
            ot = dflag + OT_WORD;
5388
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5389
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5390
        } else {
5391
            gen_lods(s, ot);
5392
        }
5393
        break;
5394
    case 0xae: /* scasS */
5395
    case 0xaf:
5396
        if ((b & 1) == 0)
5397
            ot = OT_BYTE;
5398
        else
5399
            ot = dflag + OT_WORD;
5400
        if (prefixes & PREFIX_REPNZ) {
5401
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5402
        } else if (prefixes & PREFIX_REPZ) {
5403
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5404
        } else {
5405
            gen_scas(s, ot);
5406
            s->cc_op = CC_OP_SUBB + ot;
5407
        }
5408
        break;
5409

    
5410
    case 0xa6: /* cmpsS */
5411
    case 0xa7:
5412
        if ((b & 1) == 0)
5413
            ot = OT_BYTE;
5414
        else
5415
            ot = dflag + OT_WORD;
5416
        if (prefixes & PREFIX_REPNZ) {
5417
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
5418
        } else if (prefixes & PREFIX_REPZ) {
5419
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
5420
        } else {
5421
            gen_cmps(s, ot);
5422
            s->cc_op = CC_OP_SUBB + ot;
5423
        }
5424
        break;
5425
    case 0x6c: /* insS */
5426
    case 0x6d:
5427
        if ((b & 1) == 0)
5428
            ot = OT_BYTE;
5429
        else
5430
            ot = dflag ? OT_LONG : OT_WORD;
5431
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5432
        gen_op_andl_T0_ffff();
5433
        gen_check_io(s, ot, pc_start - s->cs_base, 
5434
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
5435
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5436
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5437
        } else {
5438
            gen_ins(s, ot);
5439
        }
5440
        break;
5441
    case 0x6e: /* outsS */
5442
    case 0x6f:
5443
        if ((b & 1) == 0)
5444
            ot = OT_BYTE;
5445
        else
5446
            ot = dflag ? OT_LONG : OT_WORD;
5447
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5448
        gen_op_andl_T0_ffff();
5449
        gen_check_io(s, ot, pc_start - s->cs_base,
5450
                     svm_is_rep(prefixes) | 4);
5451
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
5452
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
5453
        } else {
5454
            gen_outs(s, ot);
5455
        }
5456
        break;
5457

    
5458
        /************************/
5459
        /* port I/O */
5460

    
5461
    case 0xe4:
5462
    case 0xe5:
5463
        if ((b & 1) == 0)
5464
            ot = OT_BYTE;
5465
        else
5466
            ot = dflag ? OT_LONG : OT_WORD;
5467
        val = ldub_code(s->pc++);
5468
        gen_op_movl_T0_im(val);
5469
        gen_check_io(s, ot, pc_start - s->cs_base,
5470
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5471
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5472
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5473
        gen_op_mov_reg_T1(ot, R_EAX);
5474
        break;
5475
    case 0xe6:
5476
    case 0xe7:
5477
        if ((b & 1) == 0)
5478
            ot = OT_BYTE;
5479
        else
5480
            ot = dflag ? OT_LONG : OT_WORD;
5481
        val = ldub_code(s->pc++);
5482
        gen_op_movl_T0_im(val);
5483
        gen_check_io(s, ot, pc_start - s->cs_base,
5484
                     svm_is_rep(prefixes));
5485
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5486

    
5487
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5488
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5489
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5490
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5491
        break;
5492
    case 0xec:
5493
    case 0xed:
5494
        if ((b & 1) == 0)
5495
            ot = OT_BYTE;
5496
        else
5497
            ot = dflag ? OT_LONG : OT_WORD;
5498
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5499
        gen_op_andl_T0_ffff();
5500
        gen_check_io(s, ot, pc_start - s->cs_base,
5501
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
5502
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5503
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
5504
        gen_op_mov_reg_T1(ot, R_EAX);
5505
        break;
5506
    case 0xee:
5507
    case 0xef:
5508
        if ((b & 1) == 0)
5509
            ot = OT_BYTE;
5510
        else
5511
            ot = dflag ? OT_LONG : OT_WORD;
5512
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
5513
        gen_op_andl_T0_ffff();
5514
        gen_check_io(s, ot, pc_start - s->cs_base,
5515
                     svm_is_rep(prefixes));
5516
        gen_op_mov_TN_reg(ot, 1, R_EAX);
5517

    
5518
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5519
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
5520
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
5521
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
5522
        break;
5523

    
5524
        /************************/
5525
        /* control */
5526
    case 0xc2: /* ret im */
5527
        val = ldsw_code(s->pc);
5528
        s->pc += 2;
5529
        gen_pop_T0(s);
5530
        if (CODE64(s) && s->dflag)
5531
            s->dflag = 2;
5532
        gen_stack_update(s, val + (2 << s->dflag));
5533
        if (s->dflag == 0)
5534
            gen_op_andl_T0_ffff();
5535
        gen_op_jmp_T0();
5536
        gen_eob(s);
5537
        break;
5538
    case 0xc3: /* ret */
5539
        gen_pop_T0(s);
5540
        gen_pop_update(s);
5541
        if (s->dflag == 0)
5542
            gen_op_andl_T0_ffff();
5543
        gen_op_jmp_T0();
5544
        gen_eob(s);
5545
        break;
5546
    case 0xca: /* lret im */
5547
        val = ldsw_code(s->pc);
5548
        s->pc += 2;
5549
    do_lret:
5550
        if (s->pe && !s->vm86) {
5551
            if (s->cc_op != CC_OP_DYNAMIC)
5552
                gen_op_set_cc_op(s->cc_op);
5553
            gen_jmp_im(pc_start - s->cs_base);
5554
            tcg_gen_helper_0_2(helper_lret_protected,
5555
                               tcg_const_i32(s->dflag), 
5556
                               tcg_const_i32(val));
5557
        } else {
5558
            gen_stack_A0(s);
5559
            /* pop offset */
5560
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5561
            if (s->dflag == 0)
5562
                gen_op_andl_T0_ffff();
5563
            /* NOTE: keeping EIP updated is not a problem in case of
5564
               exception */
5565
            gen_op_jmp_T0();
5566
            /* pop selector */
5567
            gen_op_addl_A0_im(2 << s->dflag);
5568
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
5569
            gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5570
            /* add stack offset */
5571
            gen_stack_update(s, val + (4 << s->dflag));
5572
        }
5573
        gen_eob(s);
5574
        break;
5575
    case 0xcb: /* lret */
5576
        val = 0;
5577
        goto do_lret;
5578
    case 0xcf: /* iret */
5579
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET))
5580
            break;
5581
        if (!s->pe) {
5582
            /* real mode */
5583
            tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5584
            s->cc_op = CC_OP_EFLAGS;
5585
        } else if (s->vm86) {
5586
            if (s->iopl != 3) {
5587
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5588
            } else {
5589
                tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
5590
                s->cc_op = CC_OP_EFLAGS;
5591
            }
5592
        } else {
5593
            if (s->cc_op != CC_OP_DYNAMIC)
5594
                gen_op_set_cc_op(s->cc_op);
5595
            gen_jmp_im(pc_start - s->cs_base);
5596
            tcg_gen_helper_0_2(helper_iret_protected,
5597
                               tcg_const_i32(s->dflag), 
5598
                               tcg_const_i32(s->pc - s->cs_base));
5599
            s->cc_op = CC_OP_EFLAGS;
5600
        }
5601
        gen_eob(s);
5602
        break;
5603
    case 0xe8: /* call im */
5604
        {
5605
            if (dflag)
5606
                tval = (int32_t)insn_get(s, OT_LONG);
5607
            else
5608
                tval = (int16_t)insn_get(s, OT_WORD);
5609
            next_eip = s->pc - s->cs_base;
5610
            tval += next_eip;
5611
            if (s->dflag == 0)
5612
                tval &= 0xffff;
5613
            gen_movtl_T0_im(next_eip);
5614
            gen_push_T0(s);
5615
            gen_jmp(s, tval);
5616
        }
5617
        break;
5618
    case 0x9a: /* lcall im */
5619
        {
5620
            unsigned int selector, offset;
5621

    
5622
            if (CODE64(s))
5623
                goto illegal_op;
5624
            ot = dflag ? OT_LONG : OT_WORD;
5625
            offset = insn_get(s, ot);
5626
            selector = insn_get(s, OT_WORD);
5627

    
5628
            gen_op_movl_T0_im(selector);
5629
            gen_op_movl_T1_imu(offset);
5630
        }
5631
        goto do_lcall;
5632
    case 0xe9: /* jmp im */
5633
        if (dflag)
5634
            tval = (int32_t)insn_get(s, OT_LONG);
5635
        else
5636
            tval = (int16_t)insn_get(s, OT_WORD);
5637
        tval += s->pc - s->cs_base;
5638
        if (s->dflag == 0)
5639
            tval &= 0xffff;
5640
        gen_jmp(s, tval);
5641
        break;
5642
    case 0xea: /* ljmp im */
5643
        {
5644
            unsigned int selector, offset;
5645

    
5646
            if (CODE64(s))
5647
                goto illegal_op;
5648
            ot = dflag ? OT_LONG : OT_WORD;
5649
            offset = insn_get(s, ot);
5650
            selector = insn_get(s, OT_WORD);
5651

    
5652
            gen_op_movl_T0_im(selector);
5653
            gen_op_movl_T1_imu(offset);
5654
        }
5655
        goto do_ljmp;
5656
    case 0xeb: /* jmp Jb */
5657
        tval = (int8_t)insn_get(s, OT_BYTE);
5658
        tval += s->pc - s->cs_base;
5659
        if (s->dflag == 0)
5660
            tval &= 0xffff;
5661
        gen_jmp(s, tval);
5662
        break;
5663
    case 0x70 ... 0x7f: /* jcc Jb */
5664
        tval = (int8_t)insn_get(s, OT_BYTE);
5665
        goto do_jcc;
5666
    case 0x180 ... 0x18f: /* jcc Jv */
5667
        if (dflag) {
5668
            tval = (int32_t)insn_get(s, OT_LONG);
5669
        } else {
5670
            tval = (int16_t)insn_get(s, OT_WORD);
5671
        }
5672
    do_jcc:
5673
        next_eip = s->pc - s->cs_base;
5674
        tval += next_eip;
5675
        if (s->dflag == 0)
5676
            tval &= 0xffff;
5677
        gen_jcc(s, b, tval, next_eip);
5678
        break;
5679

    
5680
    case 0x190 ... 0x19f: /* setcc Gv */
5681
        modrm = ldub_code(s->pc++);
5682
        gen_setcc(s, b);
5683
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5684
        break;
5685
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
5686
        ot = dflag + OT_WORD;
5687
        modrm = ldub_code(s->pc++);
5688
        reg = ((modrm >> 3) & 7) | rex_r;
5689
        mod = (modrm >> 6) & 3;
5690
        gen_setcc(s, b);
5691
        if (mod != 3) {
5692
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5693
            gen_op_ld_T1_A0(ot + s->mem_index);
5694
        } else {
5695
            rm = (modrm & 7) | REX_B(s);
5696
            gen_op_mov_TN_reg(ot, 1, rm);
5697
        }
5698
        gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5699
        break;
5700

    
5701
        /************************/
5702
        /* flags */
5703
    case 0x9c: /* pushf */
5704
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF))
5705
            break;
5706
        if (s->vm86 && s->iopl != 3) {
5707
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5708
        } else {
5709
            if (s->cc_op != CC_OP_DYNAMIC)
5710
                gen_op_set_cc_op(s->cc_op);
5711
            gen_op_movl_T0_eflags();
5712
            gen_push_T0(s);
5713
        }
5714
        break;
5715
    case 0x9d: /* popf */
5716
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF))
5717
            break;
5718
        if (s->vm86 && s->iopl != 3) {
5719
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5720
        } else {
5721
            gen_pop_T0(s);
5722
            if (s->cpl == 0) {
5723
                if (s->dflag) {
5724
                    gen_op_movl_eflags_T0_cpl0();
5725
                } else {
5726
                    gen_op_movw_eflags_T0_cpl0();
5727
                }
5728
            } else {
5729
                if (s->cpl <= s->iopl) {
5730
                    if (s->dflag) {
5731
                        gen_op_movl_eflags_T0_io();
5732
                    } else {
5733
                        gen_op_movw_eflags_T0_io();
5734
                    }
5735
                } else {
5736
                    if (s->dflag) {
5737
                        gen_op_movl_eflags_T0();
5738
                    } else {
5739
                        gen_op_movw_eflags_T0();
5740
                    }
5741
                }
5742
            }
5743
            gen_pop_update(s);
5744
            s->cc_op = CC_OP_EFLAGS;
5745
            /* abort translation because TF flag may change */
5746
            gen_jmp_im(s->pc - s->cs_base);
5747
            gen_eob(s);
5748
        }
5749
        break;
5750
    case 0x9e: /* sahf */
5751
        if (CODE64(s))
5752
            goto illegal_op;
5753
        gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
5754
        if (s->cc_op != CC_OP_DYNAMIC)
5755
            gen_op_set_cc_op(s->cc_op);
5756
        gen_op_movb_eflags_T0();
5757
        s->cc_op = CC_OP_EFLAGS;
5758
        break;
5759
    case 0x9f: /* lahf */
5760
        if (CODE64(s))
5761
            goto illegal_op;
5762
        if (s->cc_op != CC_OP_DYNAMIC)
5763
            gen_op_set_cc_op(s->cc_op);
5764
        gen_op_movl_T0_eflags();
5765
        gen_op_mov_reg_T0(OT_BYTE, R_AH);
5766
        break;
5767
    case 0xf5: /* cmc */
5768
        if (s->cc_op != CC_OP_DYNAMIC)
5769
            gen_op_set_cc_op(s->cc_op);
5770
        gen_op_cmc();
5771
        s->cc_op = CC_OP_EFLAGS;
5772
        break;
5773
    case 0xf8: /* clc */
5774
        if (s->cc_op != CC_OP_DYNAMIC)
5775
            gen_op_set_cc_op(s->cc_op);
5776
        gen_op_clc();
5777
        s->cc_op = CC_OP_EFLAGS;
5778
        break;
5779
    case 0xf9: /* stc */
5780
        if (s->cc_op != CC_OP_DYNAMIC)
5781
            gen_op_set_cc_op(s->cc_op);
5782
        gen_op_stc();
5783
        s->cc_op = CC_OP_EFLAGS;
5784
        break;
5785
    case 0xfc: /* cld */
5786
        tcg_gen_movi_i32(cpu_tmp2_i32, 1);
5787
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5788
        break;
5789
    case 0xfd: /* std */
5790
        tcg_gen_movi_i32(cpu_tmp2_i32, -1);
5791
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
5792
        break;
5793

    
5794
        /************************/
5795
        /* bit operations */
5796
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
5797
        ot = dflag + OT_WORD;
5798
        modrm = ldub_code(s->pc++);
5799
        op = (modrm >> 3) & 7;
5800
        mod = (modrm >> 6) & 3;
5801
        rm = (modrm & 7) | REX_B(s);
5802
        if (mod != 3) {
5803
            s->rip_offset = 1;
5804
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5805
            gen_op_ld_T0_A0(ot + s->mem_index);
5806
        } else {
5807
            gen_op_mov_TN_reg(ot, 0, rm);
5808
        }
5809
        /* load shift */
5810
        val = ldub_code(s->pc++);
5811
        gen_op_movl_T1_im(val);
5812
        if (op < 4)
5813
            goto illegal_op;
5814
        op -= 4;
5815
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5816
        s->cc_op = CC_OP_SARB + ot;
5817
        if (op != 0) {
5818
            if (mod != 3)
5819
                gen_op_st_T0_A0(ot + s->mem_index);
5820
            else
5821
                gen_op_mov_reg_T0(ot, rm);
5822
            gen_op_update_bt_cc();
5823
        }
5824
        break;
5825
    case 0x1a3: /* bt Gv, Ev */
5826
        op = 0;
5827
        goto do_btx;
5828
    case 0x1ab: /* bts */
5829
        op = 1;
5830
        goto do_btx;
5831
    case 0x1b3: /* btr */
5832
        op = 2;
5833
        goto do_btx;
5834
    case 0x1bb: /* btc */
5835
        op = 3;
5836
    do_btx:
5837
        ot = dflag + OT_WORD;
5838
        modrm = ldub_code(s->pc++);
5839
        reg = ((modrm >> 3) & 7) | rex_r;
5840
        mod = (modrm >> 6) & 3;
5841
        rm = (modrm & 7) | REX_B(s);
5842
        gen_op_mov_TN_reg(OT_LONG, 1, reg);
5843
        if (mod != 3) {
5844
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5845
            /* specific case: we need to add a displacement */
5846
            gen_op_add_bit_A0_T1[ot - OT_WORD]();
5847
            gen_op_ld_T0_A0(ot + s->mem_index);
5848
        } else {
5849
            gen_op_mov_TN_reg(ot, 0, rm);
5850
        }
5851
        gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5852
        s->cc_op = CC_OP_SARB + ot;
5853
        if (op != 0) {
5854
            if (mod != 3)
5855
                gen_op_st_T0_A0(ot + s->mem_index);
5856
            else
5857
                gen_op_mov_reg_T0(ot, rm);
5858
            gen_op_update_bt_cc();
5859
        }
5860
        break;
5861
    case 0x1bc: /* bsf */
5862
    case 0x1bd: /* bsr */
5863
        ot = dflag + OT_WORD;
5864
        modrm = ldub_code(s->pc++);
5865
        reg = ((modrm >> 3) & 7) | rex_r;
5866
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5867
        /* NOTE: in order to handle the 0 case, we must load the
5868
           result. It could be optimized with a generated jump */
5869
        gen_op_mov_TN_reg(ot, 1, reg);
5870
        gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5871
        gen_op_mov_reg_T1(ot, reg);
5872
        s->cc_op = CC_OP_LOGICB + ot;
5873
        break;
5874
        /************************/
5875
        /* bcd */
5876
    case 0x27: /* daa */
5877
        if (CODE64(s))
5878
            goto illegal_op;
5879
        if (s->cc_op != CC_OP_DYNAMIC)
5880
            gen_op_set_cc_op(s->cc_op);
5881
        gen_op_daa();
5882
        s->cc_op = CC_OP_EFLAGS;
5883
        break;
5884
    case 0x2f: /* das */
5885
        if (CODE64(s))
5886
            goto illegal_op;
5887
        if (s->cc_op != CC_OP_DYNAMIC)
5888
            gen_op_set_cc_op(s->cc_op);
5889
        gen_op_das();
5890
        s->cc_op = CC_OP_EFLAGS;
5891
        break;
5892
    case 0x37: /* aaa */
5893
        if (CODE64(s))
5894
            goto illegal_op;
5895
        if (s->cc_op != CC_OP_DYNAMIC)
5896
            gen_op_set_cc_op(s->cc_op);
5897
        gen_op_aaa();
5898
        s->cc_op = CC_OP_EFLAGS;
5899
        break;
5900
    case 0x3f: /* aas */
5901
        if (CODE64(s))
5902
            goto illegal_op;
5903
        if (s->cc_op != CC_OP_DYNAMIC)
5904
            gen_op_set_cc_op(s->cc_op);
5905
        gen_op_aas();
5906
        s->cc_op = CC_OP_EFLAGS;
5907
        break;
5908
    case 0xd4: /* aam */
5909
        if (CODE64(s))
5910
            goto illegal_op;
5911
        val = ldub_code(s->pc++);
5912
        if (val == 0) {
5913
            gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
5914
        } else {
5915
            gen_op_aam(val);
5916
            s->cc_op = CC_OP_LOGICB;
5917
        }
5918
        break;
5919
    case 0xd5: /* aad */
5920
        if (CODE64(s))
5921
            goto illegal_op;
5922
        val = ldub_code(s->pc++);
5923
        gen_op_aad(val);
5924
        s->cc_op = CC_OP_LOGICB;
5925
        break;
5926
        /************************/
5927
        /* misc */
5928
    case 0x90: /* nop */
5929
        /* XXX: xchg + rex handling */
5930
        /* XXX: correct lock test for all insn */
5931
        if (prefixes & PREFIX_LOCK)
5932
            goto illegal_op;
5933
        if (prefixes & PREFIX_REPZ) {
5934
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
5935
        }
5936
        break;
5937
    case 0x9b: /* fwait */
5938
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5939
            (HF_MP_MASK | HF_TS_MASK)) {
5940
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5941
        } else {
5942
            if (s->cc_op != CC_OP_DYNAMIC)
5943
                gen_op_set_cc_op(s->cc_op);
5944
            gen_jmp_im(pc_start - s->cs_base);
5945
            tcg_gen_helper_0_0(helper_fwait);
5946
        }
5947
        break;
5948
    case 0xcc: /* int3 */
5949
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5950
            break;
5951
        gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5952
        break;
5953
    case 0xcd: /* int N */
5954
        val = ldub_code(s->pc++);
5955
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5956
            break;
5957
        if (s->vm86 && s->iopl != 3) {
5958
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5959
        } else {
5960
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5961
        }
5962
        break;
5963
    case 0xce: /* into */
5964
        if (CODE64(s))
5965
            goto illegal_op;
5966
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SWINT))
5967
            break;
5968
        if (s->cc_op != CC_OP_DYNAMIC)
5969
            gen_op_set_cc_op(s->cc_op);
5970
        gen_jmp_im(pc_start - s->cs_base);
5971
        gen_op_into(s->pc - pc_start);
5972
        break;
5973
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
5974
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP))
5975
            break;
5976
#if 1
5977
        gen_debug(s, pc_start - s->cs_base);
5978
#else
5979
        /* start debug */
5980
        tb_flush(cpu_single_env);
5981
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5982
#endif
5983
        break;
5984
    case 0xfa: /* cli */
5985
        if (!s->vm86) {
5986
            if (s->cpl <= s->iopl) {
5987
                tcg_gen_helper_0_0(helper_cli);
5988
            } else {
5989
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5990
            }
5991
        } else {
5992
            if (s->iopl == 3) {
5993
                tcg_gen_helper_0_0(helper_cli);
5994
            } else {
5995
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5996
            }
5997
        }
5998
        break;
5999
    case 0xfb: /* sti */
6000
        if (!s->vm86) {
6001
            if (s->cpl <= s->iopl) {
6002
            gen_sti:
6003
                tcg_gen_helper_0_0(helper_sti);
6004
                /* interruptions are enabled only the first insn after sti */
6005
                /* If several instructions disable interrupts, only the
6006
                   _first_ does it */
6007
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
6008
                    tcg_gen_helper_0_0(helper_set_inhibit_irq);
6009
                /* give a chance to handle pending irqs */
6010
                gen_jmp_im(s->pc - s->cs_base);
6011
                gen_eob(s);
6012
            } else {
6013
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6014
            }
6015
        } else {
6016
            if (s->iopl == 3) {
6017
                goto gen_sti;
6018
            } else {
6019
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6020
            }
6021
        }
6022
        break;
6023
    case 0x62: /* bound */
6024
        if (CODE64(s))
6025
            goto illegal_op;
6026
        ot = dflag ? OT_LONG : OT_WORD;
6027
        modrm = ldub_code(s->pc++);
6028
        reg = (modrm >> 3) & 7;
6029
        mod = (modrm >> 6) & 3;
6030
        if (mod == 3)
6031
            goto illegal_op;
6032
        gen_op_mov_TN_reg(ot, 0, reg);
6033
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6034
        gen_jmp_im(pc_start - s->cs_base);
6035
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6036
        if (ot == OT_WORD)
6037
            tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
6038
        else
6039
            tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
6040
        break;
6041
    case 0x1c8 ... 0x1cf: /* bswap reg */
6042
        reg = (b & 7) | REX_B(s);
6043
#ifdef TARGET_X86_64
6044
        if (dflag == 2) {
6045
            gen_op_mov_TN_reg(OT_QUAD, 0, reg);
6046
            tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
6047
            gen_op_mov_reg_T0(OT_QUAD, reg);
6048
        } else
6049
        {
6050
            TCGv tmp0;
6051
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
6052
            
6053
            tmp0 = tcg_temp_new(TCG_TYPE_I32);
6054
            tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
6055
            tcg_gen_bswap_i32(tmp0, tmp0);
6056
            tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
6057
            gen_op_mov_reg_T0(OT_LONG, reg);
6058
        }
6059
#else
6060
        {
6061
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
6062
            tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
6063
            gen_op_mov_reg_T0(OT_LONG, reg);
6064
        }
6065
#endif
6066
        break;
6067
    case 0xd6: /* salc */
6068
        if (CODE64(s))
6069
            goto illegal_op;
6070
        if (s->cc_op != CC_OP_DYNAMIC)
6071
            gen_op_set_cc_op(s->cc_op);
6072
        gen_op_salc();
6073
        break;
6074
    case 0xe0: /* loopnz */
6075
    case 0xe1: /* loopz */
6076
        if (s->cc_op != CC_OP_DYNAMIC)
6077
            gen_op_set_cc_op(s->cc_op);
6078
        /* FALL THRU */
6079
    case 0xe2: /* loop */
6080
    case 0xe3: /* jecxz */
6081
        {
6082
            int l1, l2;
6083

    
6084
            tval = (int8_t)insn_get(s, OT_BYTE);
6085
            next_eip = s->pc - s->cs_base;
6086
            tval += next_eip;
6087
            if (s->dflag == 0)
6088
                tval &= 0xffff;
6089

    
6090
            l1 = gen_new_label();
6091
            l2 = gen_new_label();
6092
            b &= 3;
6093
            if (b == 3) {
6094
                gen_op_jz_ecx[s->aflag](l1);
6095
            } else {
6096
                gen_op_dec_ECX[s->aflag]();
6097
                if (b <= 1)
6098
                    gen_op_mov_T0_cc();
6099
                gen_op_loop[s->aflag][b](l1);
6100
            }
6101

    
6102
            gen_jmp_im(next_eip);
6103
            gen_op_jmp_label(l2);
6104
            gen_set_label(l1);
6105
            gen_jmp_im(tval);
6106
            gen_set_label(l2);
6107
            gen_eob(s);
6108
        }
6109
        break;
6110
    case 0x130: /* wrmsr */
6111
    case 0x132: /* rdmsr */
6112
        if (s->cpl != 0) {
6113
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6114
        } else {
6115
            int retval = 0;
6116
            if (b & 2) {
6117
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 0);
6118
                tcg_gen_helper_0_0(helper_rdmsr);
6119
            } else {
6120
                retval = gen_svm_check_intercept_param(s, pc_start, SVM_EXIT_MSR, 1);
6121
                tcg_gen_helper_0_0(helper_wrmsr);
6122
            }
6123
            if(retval)
6124
                gen_eob(s);
6125
        }
6126
        break;
6127
    case 0x131: /* rdtsc */
6128
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RDTSC))
6129
            break;
6130
        gen_jmp_im(pc_start - s->cs_base);
6131
        tcg_gen_helper_0_0(helper_rdtsc);
6132
        break;
6133
    case 0x133: /* rdpmc */
6134
        gen_jmp_im(pc_start - s->cs_base);
6135
        tcg_gen_helper_0_0(helper_rdpmc);
6136
        break;
6137
    case 0x134: /* sysenter */
6138
        if (CODE64(s))
6139
            goto illegal_op;
6140
        if (!s->pe) {
6141
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6142
        } else {
6143
            if (s->cc_op != CC_OP_DYNAMIC) {
6144
                gen_op_set_cc_op(s->cc_op);
6145
                s->cc_op = CC_OP_DYNAMIC;
6146
            }
6147
            gen_jmp_im(pc_start - s->cs_base);
6148
            tcg_gen_helper_0_0(helper_sysenter);
6149
            gen_eob(s);
6150
        }
6151
        break;
6152
    case 0x135: /* sysexit */
6153
        if (CODE64(s))
6154
            goto illegal_op;
6155
        if (!s->pe) {
6156
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6157
        } else {
6158
            if (s->cc_op != CC_OP_DYNAMIC) {
6159
                gen_op_set_cc_op(s->cc_op);
6160
                s->cc_op = CC_OP_DYNAMIC;
6161
            }
6162
            gen_jmp_im(pc_start - s->cs_base);
6163
            tcg_gen_helper_0_0(helper_sysexit);
6164
            gen_eob(s);
6165
        }
6166
        break;
6167
#ifdef TARGET_X86_64
6168
    case 0x105: /* syscall */
6169
        /* XXX: is it usable in real mode ? */
6170
        if (s->cc_op != CC_OP_DYNAMIC) {
6171
            gen_op_set_cc_op(s->cc_op);
6172
            s->cc_op = CC_OP_DYNAMIC;
6173
        }
6174
        gen_jmp_im(pc_start - s->cs_base);
6175
        tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
6176
        gen_eob(s);
6177
        break;
6178
    case 0x107: /* sysret */
6179
        if (!s->pe) {
6180
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6181
        } else {
6182
            if (s->cc_op != CC_OP_DYNAMIC) {
6183
                gen_op_set_cc_op(s->cc_op);
6184
                s->cc_op = CC_OP_DYNAMIC;
6185
            }
6186
            gen_jmp_im(pc_start - s->cs_base);
6187
            tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
6188
            /* condition codes are modified only in long mode */
6189
            if (s->lma)
6190
                s->cc_op = CC_OP_EFLAGS;
6191
            gen_eob(s);
6192
        }
6193
        break;
6194
#endif
6195
    case 0x1a2: /* cpuid */
6196
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CPUID))
6197
            break;
6198
        tcg_gen_helper_0_0(helper_cpuid);
6199
        break;
6200
    case 0xf4: /* hlt */
6201
        if (s->cpl != 0) {
6202
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6203
        } else {
6204
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_HLT))
6205
                break;
6206
            if (s->cc_op != CC_OP_DYNAMIC)
6207
                gen_op_set_cc_op(s->cc_op);
6208
            gen_jmp_im(s->pc - s->cs_base);
6209
            tcg_gen_helper_0_0(helper_hlt);
6210
            s->is_jmp = 3;
6211
        }
6212
        break;
6213
    case 0x100:
6214
        modrm = ldub_code(s->pc++);
6215
        mod = (modrm >> 6) & 3;
6216
        op = (modrm >> 3) & 7;
6217
        switch(op) {
6218
        case 0: /* sldt */
6219
            if (!s->pe || s->vm86)
6220
                goto illegal_op;
6221
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ))
6222
                break;
6223
            gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
6224
            ot = OT_WORD;
6225
            if (mod == 3)
6226
                ot += s->dflag;
6227
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6228
            break;
6229
        case 2: /* lldt */
6230
            if (!s->pe || s->vm86)
6231
                goto illegal_op;
6232
            if (s->cpl != 0) {
6233
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6234
            } else {
6235
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE))
6236
                    break;
6237
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6238
                gen_jmp_im(pc_start - s->cs_base);
6239
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6240
                tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
6241
            }
6242
            break;
6243
        case 1: /* str */
6244
            if (!s->pe || s->vm86)
6245
                goto illegal_op;
6246
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ))
6247
                break;
6248
            gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
6249
            ot = OT_WORD;
6250
            if (mod == 3)
6251
                ot += s->dflag;
6252
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
6253
            break;
6254
        case 3: /* ltr */
6255
            if (!s->pe || s->vm86)
6256
                goto illegal_op;
6257
            if (s->cpl != 0) {
6258
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6259
            } else {
6260
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE))
6261
                    break;
6262
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6263
                gen_jmp_im(pc_start - s->cs_base);
6264
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6265
                tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
6266
            }
6267
            break;
6268
        case 4: /* verr */
6269
        case 5: /* verw */
6270
            if (!s->pe || s->vm86)
6271
                goto illegal_op;
6272
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6273
            if (s->cc_op != CC_OP_DYNAMIC)
6274
                gen_op_set_cc_op(s->cc_op);
6275
            if (op == 4)
6276
                gen_op_verr();
6277
            else
6278
                gen_op_verw();
6279
            s->cc_op = CC_OP_EFLAGS;
6280
            break;
6281
        default:
6282
            goto illegal_op;
6283
        }
6284
        break;
6285
    case 0x101:
6286
        modrm = ldub_code(s->pc++);
6287
        mod = (modrm >> 6) & 3;
6288
        op = (modrm >> 3) & 7;
6289
        rm = modrm & 7;
6290
        switch(op) {
6291
        case 0: /* sgdt */
6292
            if (mod == 3)
6293
                goto illegal_op;
6294
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ))
6295
                break;
6296
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6297
            gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
6298
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
6299
            gen_add_A0_im(s, 2);
6300
            gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
6301
            if (!s->dflag)
6302
                gen_op_andl_T0_im(0xffffff);
6303
            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6304
            break;
6305
        case 1:
6306
            if (mod == 3) {
6307
                switch (rm) {
6308
                case 0: /* monitor */
6309
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6310
                        s->cpl != 0)
6311
                        goto illegal_op;
6312
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MONITOR))
6313
                        break;
6314
                    gen_jmp_im(pc_start - s->cs_base);
6315
#ifdef TARGET_X86_64
6316
                    if (s->aflag == 2) {
6317
                        gen_op_movq_A0_reg(R_EBX);
6318
                        gen_op_addq_A0_AL();
6319
                    } else
6320
#endif
6321
                    {
6322
                        gen_op_movl_A0_reg(R_EBX);
6323
                        gen_op_addl_A0_AL();
6324
                        if (s->aflag == 0)
6325
                            gen_op_andl_A0_ffff();
6326
                    }
6327
                    gen_add_A0_ds_seg(s);
6328
                    tcg_gen_helper_0_1(helper_monitor, cpu_A0);
6329
                    break;
6330
                case 1: /* mwait */
6331
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
6332
                        s->cpl != 0)
6333
                        goto illegal_op;
6334
                    if (s->cc_op != CC_OP_DYNAMIC) {
6335
                        gen_op_set_cc_op(s->cc_op);
6336
                        s->cc_op = CC_OP_DYNAMIC;
6337
                    }
6338
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_MWAIT))
6339
                        break;
6340
                    gen_jmp_im(s->pc - s->cs_base);
6341
                    tcg_gen_helper_0_0(helper_mwait);
6342
                    gen_eob(s);
6343
                    break;
6344
                default:
6345
                    goto illegal_op;
6346
                }
6347
            } else { /* sidt */
6348
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ))
6349
                    break;
6350
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6351
                gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
6352
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
6353
                gen_add_A0_im(s, 2);
6354
                gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
6355
                if (!s->dflag)
6356
                    gen_op_andl_T0_im(0xffffff);
6357
                gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6358
            }
6359
            break;
6360
        case 2: /* lgdt */
6361
        case 3: /* lidt */
6362
            if (mod == 3) {
6363
                switch(rm) {
6364
                case 0: /* VMRUN */
6365
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMRUN))
6366
                        break;
6367
                    if (s->cc_op != CC_OP_DYNAMIC)
6368
                        gen_op_set_cc_op(s->cc_op);
6369
                    gen_jmp_im(s->pc - s->cs_base);
6370
                    tcg_gen_helper_0_0(helper_vmrun);
6371
                    s->cc_op = CC_OP_EFLAGS;
6372
                    gen_eob(s);
6373
                    break;
6374
                case 1: /* VMMCALL */
6375
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMMCALL))
6376
                         break;
6377
                    /* FIXME: cause #UD if hflags & SVM */
6378
                    tcg_gen_helper_0_0(helper_vmmcall);
6379
                    break;
6380
                case 2: /* VMLOAD */
6381
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMLOAD))
6382
                         break;
6383
                    tcg_gen_helper_0_0(helper_vmload);
6384
                    break;
6385
                case 3: /* VMSAVE */
6386
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_VMSAVE))
6387
                         break;
6388
                    tcg_gen_helper_0_0(helper_vmsave);
6389
                    break;
6390
                case 4: /* STGI */
6391
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_STGI))
6392
                         break;
6393
                    tcg_gen_helper_0_0(helper_stgi);
6394
                    break;
6395
                case 5: /* CLGI */
6396
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_CLGI))
6397
                         break;
6398
                    tcg_gen_helper_0_0(helper_clgi);
6399
                    break;
6400
                case 6: /* SKINIT */
6401
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_SKINIT))
6402
                         break;
6403
                    tcg_gen_helper_0_0(helper_skinit);
6404
                    break;
6405
                case 7: /* INVLPGA */
6406
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPGA))
6407
                         break;
6408
                    tcg_gen_helper_0_0(helper_invlpga);
6409
                    break;
6410
                default:
6411
                    goto illegal_op;
6412
                }
6413
            } else if (s->cpl != 0) {
6414
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6415
            } else {
6416
                if (gen_svm_check_intercept(s, pc_start,
6417
                                            op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE))
6418
                    break;
6419
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6420
                gen_op_ld_T1_A0(OT_WORD + s->mem_index);
6421
                gen_add_A0_im(s, 2);
6422
                gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
6423
                if (!s->dflag)
6424
                    gen_op_andl_T0_im(0xffffff);
6425
                if (op == 2) {
6426
                    gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
6427
                    gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
6428
                } else {
6429
                    gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
6430
                    gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
6431
                }
6432
            }
6433
            break;
6434
        case 4: /* smsw */
6435
            if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0))
6436
                break;
6437
            gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
6438
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
6439
            break;
6440
        case 6: /* lmsw */
6441
            if (s->cpl != 0) {
6442
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6443
            } else {
6444
                if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0))
6445
                    break;
6446
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
6447
                tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
6448
                gen_jmp_im(s->pc - s->cs_base);
6449
                gen_eob(s);
6450
            }
6451
            break;
6452
        case 7: /* invlpg */
6453
            if (s->cpl != 0) {
6454
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6455
            } else {
6456
                if (mod == 3) {
6457
#ifdef TARGET_X86_64
6458
                    if (CODE64(s) && rm == 0) {
6459
                        /* swapgs */
6460
                        gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
6461
                        gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
6462
                        gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
6463
                        gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
6464
                    } else
6465
#endif
6466
                    {
6467
                        goto illegal_op;
6468
                    }
6469
                } else {
6470
                    if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_INVLPG))
6471
                        break;
6472
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6473
                    tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
6474
                    gen_jmp_im(s->pc - s->cs_base);
6475
                    gen_eob(s);
6476
                }
6477
            }
6478
            break;
6479
        default:
6480
            goto illegal_op;
6481
        }
6482
        break;
6483
    case 0x108: /* invd */
6484
    case 0x109: /* wbinvd */
6485
        if (s->cpl != 0) {
6486
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6487
        } else {
6488
            if (gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD))
6489
                break;
6490
            /* nothing to do */
6491
        }
6492
        break;
6493
    case 0x63: /* arpl or movslS (x86_64) */
6494
#ifdef TARGET_X86_64
6495
        if (CODE64(s)) {
6496
            int d_ot;
6497
            /* d_ot is the size of destination */
6498
            d_ot = dflag + OT_WORD;
6499

    
6500
            modrm = ldub_code(s->pc++);
6501
            reg = ((modrm >> 3) & 7) | rex_r;
6502
            mod = (modrm >> 6) & 3;
6503
            rm = (modrm & 7) | REX_B(s);
6504

    
6505
            if (mod == 3) {
6506
                gen_op_mov_TN_reg(OT_LONG, 0, rm);
6507
                /* sign extend */
6508
                if (d_ot == OT_QUAD)
6509
                    gen_op_movslq_T0_T0();
6510
                gen_op_mov_reg_T0(d_ot, reg);
6511
            } else {
6512
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6513
                if (d_ot == OT_QUAD) {
6514
                    gen_op_lds_T0_A0(OT_LONG + s->mem_index);
6515
                } else {
6516
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6517
                }
6518
                gen_op_mov_reg_T0(d_ot, reg);
6519
            }
6520
        } else
6521
#endif
6522
        {
6523
            if (!s->pe || s->vm86)
6524
                goto illegal_op;
6525
            ot = dflag ? OT_LONG : OT_WORD;
6526
            modrm = ldub_code(s->pc++);
6527
            reg = (modrm >> 3) & 7;
6528
            mod = (modrm >> 6) & 3;
6529
            rm = modrm & 7;
6530
            if (mod != 3) {
6531
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6532
                gen_op_ld_T0_A0(ot + s->mem_index);
6533
            } else {
6534
                gen_op_mov_TN_reg(ot, 0, rm);
6535
            }
6536
            gen_op_mov_TN_reg(ot, 1, reg);
6537
            if (s->cc_op != CC_OP_DYNAMIC)
6538
                gen_op_set_cc_op(s->cc_op);
6539
            gen_op_arpl();
6540
            s->cc_op = CC_OP_EFLAGS;
6541
            if (mod != 3) {
6542
                gen_op_st_T0_A0(ot + s->mem_index);
6543
            } else {
6544
                gen_op_mov_reg_T0(ot, rm);
6545
            }
6546
            gen_op_arpl_update();
6547
        }
6548
        break;
6549
    case 0x102: /* lar */
6550
    case 0x103: /* lsl */
6551
        if (!s->pe || s->vm86)
6552
            goto illegal_op;
6553
        ot = dflag ? OT_LONG : OT_WORD;
6554
        modrm = ldub_code(s->pc++);
6555
        reg = ((modrm >> 3) & 7) | rex_r;
6556
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
6557
        gen_op_mov_TN_reg(ot, 1, reg);
6558
        if (s->cc_op != CC_OP_DYNAMIC)
6559
            gen_op_set_cc_op(s->cc_op);
6560
        if (b == 0x102)
6561
            gen_op_lar();
6562
        else
6563
            gen_op_lsl();
6564
        s->cc_op = CC_OP_EFLAGS;
6565
        gen_op_mov_reg_T1(ot, reg);
6566
        break;
6567
    case 0x118:
6568
        modrm = ldub_code(s->pc++);
6569
        mod = (modrm >> 6) & 3;
6570
        op = (modrm >> 3) & 7;
6571
        switch(op) {
6572
        case 0: /* prefetchnta */
6573
        case 1: /* prefetchnt0 */
6574
        case 2: /* prefetchnt0 */
6575
        case 3: /* prefetchnt0 */
6576
            if (mod == 3)
6577
                goto illegal_op;
6578
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6579
            /* nothing more to do */
6580
            break;
6581
        default: /* nop (multi byte) */
6582
            gen_nop_modrm(s, modrm);
6583
            break;
6584
        }
6585
        break;
6586
    case 0x119 ... 0x11f: /* nop (multi byte) */
6587
        modrm = ldub_code(s->pc++);
6588
        gen_nop_modrm(s, modrm);
6589
        break;
6590
    case 0x120: /* mov reg, crN */
6591
    case 0x122: /* mov crN, reg */
6592
        if (s->cpl != 0) {
6593
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6594
        } else {
6595
            modrm = ldub_code(s->pc++);
6596
            if ((modrm & 0xc0) != 0xc0)
6597
                goto illegal_op;
6598
            rm = (modrm & 7) | REX_B(s);
6599
            reg = ((modrm >> 3) & 7) | rex_r;
6600
            if (CODE64(s))
6601
                ot = OT_QUAD;
6602
            else
6603
                ot = OT_LONG;
6604
            switch(reg) {
6605
            case 0:
6606
            case 2:
6607
            case 3:
6608
            case 4:
6609
            case 8:
6610
                if (b & 2) {
6611
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0 + reg);
6612
                    gen_op_mov_TN_reg(ot, 0, rm);
6613
                    tcg_gen_helper_0_2(helper_movl_crN_T0, 
6614
                                       tcg_const_i32(reg), cpu_T[0]);
6615
                    gen_jmp_im(s->pc - s->cs_base);
6616
                    gen_eob(s);
6617
                } else {
6618
                    gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0 + reg);
6619
#if !defined(CONFIG_USER_ONLY)
6620
                    if (reg == 8)
6621
                        tcg_gen_helper_1_0(helper_movtl_T0_cr8, cpu_T[0]);
6622
                    else
6623
#endif
6624
                        gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
6625
                    gen_op_mov_reg_T0(ot, rm);
6626
                }
6627
                break;
6628
            default:
6629
                goto illegal_op;
6630
            }
6631
        }
6632
        break;
6633
    case 0x121: /* mov reg, drN */
6634
    case 0x123: /* mov drN, reg */
6635
        if (s->cpl != 0) {
6636
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6637
        } else {
6638
            modrm = ldub_code(s->pc++);
6639
            if ((modrm & 0xc0) != 0xc0)
6640
                goto illegal_op;
6641
            rm = (modrm & 7) | REX_B(s);
6642
            reg = ((modrm >> 3) & 7) | rex_r;
6643
            if (CODE64(s))
6644
                ot = OT_QUAD;
6645
            else
6646
                ot = OT_LONG;
6647
            /* XXX: do it dynamically with CR4.DE bit */
6648
            if (reg == 4 || reg == 5 || reg >= 8)
6649
                goto illegal_op;
6650
            if (b & 2) {
6651
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
6652
                gen_op_mov_TN_reg(ot, 0, rm);
6653
                tcg_gen_helper_0_2(helper_movl_drN_T0,
6654
                                   tcg_const_i32(reg), cpu_T[0]);
6655
                gen_jmp_im(s->pc - s->cs_base);
6656
                gen_eob(s);
6657
            } else {
6658
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
6659
                gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6660
                gen_op_mov_reg_T0(ot, rm);
6661
            }
6662
        }
6663
        break;
6664
    case 0x106: /* clts */
6665
        if (s->cpl != 0) {
6666
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6667
        } else {
6668
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
6669
            tcg_gen_helper_0_0(helper_clts);
6670
            /* abort block because static cpu state changed */
6671
            gen_jmp_im(s->pc - s->cs_base);
6672
            gen_eob(s);
6673
        }
6674
        break;
6675
    /* MMX/3DNow!/SSE/SSE2/SSE3 support */
6676
    case 0x1c3: /* MOVNTI reg, mem */
6677
        if (!(s->cpuid_features & CPUID_SSE2))
6678
            goto illegal_op;
6679
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6680
        modrm = ldub_code(s->pc++);
6681
        mod = (modrm >> 6) & 3;
6682
        if (mod == 3)
6683
            goto illegal_op;
6684
        reg = ((modrm >> 3) & 7) | rex_r;
6685
        /* generate a generic store */
6686
        gen_ldst_modrm(s, modrm, ot, reg, 1);
6687
        break;
6688
    case 0x1ae:
6689
        modrm = ldub_code(s->pc++);
6690
        mod = (modrm >> 6) & 3;
6691
        op = (modrm >> 3) & 7;
6692
        switch(op) {
6693
        case 0: /* fxsave */
6694
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6695
                (s->flags & HF_EM_MASK))
6696
                goto illegal_op;
6697
            if (s->flags & HF_TS_MASK) {
6698
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6699
                break;
6700
            }
6701
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6702
            if (s->cc_op != CC_OP_DYNAMIC)
6703
                gen_op_set_cc_op(s->cc_op);
6704
            gen_jmp_im(pc_start - s->cs_base);
6705
            tcg_gen_helper_0_2(helper_fxsave, 
6706
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
6707
            break;
6708
        case 1: /* fxrstor */
6709
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6710
                (s->flags & HF_EM_MASK))
6711
                goto illegal_op;
6712
            if (s->flags & HF_TS_MASK) {
6713
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6714
                break;
6715
            }
6716
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6717
            if (s->cc_op != CC_OP_DYNAMIC)
6718
                gen_op_set_cc_op(s->cc_op);
6719
            gen_jmp_im(pc_start - s->cs_base);
6720
            tcg_gen_helper_0_2(helper_fxrstor,
6721
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
6722
            break;
6723
        case 2: /* ldmxcsr */
6724
        case 3: /* stmxcsr */
6725
            if (s->flags & HF_TS_MASK) {
6726
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6727
                break;
6728
            }
6729
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6730
                mod == 3)
6731
                goto illegal_op;
6732
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6733
            if (op == 2) {
6734
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6735
                gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6736
            } else {
6737
                gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6738
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
6739
            }
6740
            break;
6741
        case 5: /* lfence */
6742
        case 6: /* mfence */
6743
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6744
                goto illegal_op;
6745
            break;
6746
        case 7: /* sfence / clflush */
6747
            if ((modrm & 0xc7) == 0xc0) {
6748
                /* sfence */
6749
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
6750
                if (!(s->cpuid_features & CPUID_SSE))
6751
                    goto illegal_op;
6752
            } else {
6753
                /* clflush */
6754
                if (!(s->cpuid_features & CPUID_CLFLUSH))
6755
                    goto illegal_op;
6756
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6757
            }
6758
            break;
6759
        default:
6760
            goto illegal_op;
6761
        }
6762
        break;
6763
    case 0x10d: /* 3DNow! prefetch(w) */
6764
        modrm = ldub_code(s->pc++);
6765
        mod = (modrm >> 6) & 3;
6766
        if (mod == 3)
6767
            goto illegal_op;
6768
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6769
        /* ignore for now */
6770
        break;
6771
    case 0x1aa: /* rsm */
6772
        if (gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM))
6773
            break;
6774
        if (!(s->flags & HF_SMM_MASK))
6775
            goto illegal_op;
6776
        if (s->cc_op != CC_OP_DYNAMIC) {
6777
            gen_op_set_cc_op(s->cc_op);
6778
            s->cc_op = CC_OP_DYNAMIC;
6779
        }
6780
        gen_jmp_im(s->pc - s->cs_base);
6781
        tcg_gen_helper_0_0(helper_rsm);
6782
        gen_eob(s);
6783
        break;
6784
    case 0x10e ... 0x10f:
6785
        /* 3DNow! instructions, ignore prefixes */
6786
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
6787
    case 0x110 ... 0x117:
6788
    case 0x128 ... 0x12f:
6789
    case 0x150 ... 0x177:
6790
    case 0x17c ... 0x17f:
6791
    case 0x1c2:
6792
    case 0x1c4 ... 0x1c6:
6793
    case 0x1d0 ... 0x1fe:
6794
        gen_sse(s, b, pc_start, rex_r);
6795
        break;
6796
    default:
6797
        goto illegal_op;
6798
    }
6799
    /* lock generation */
6800
    if (s->prefix & PREFIX_LOCK)
6801
        tcg_gen_helper_0_0(helper_unlock);
6802
    return s->pc;
6803
 illegal_op:
6804
    if (s->prefix & PREFIX_LOCK)
6805
        tcg_gen_helper_0_0(helper_unlock);
6806
    /* XXX: ensure that no lock was generated */
6807
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6808
    return s->pc;
6809
}
6810

    
6811
/* Callback installed via tcg_set_macro_func(): expand a TCG "macro"
 * opcode (identified by macro_id) into its constituent TCG ops.
 *
 * Only the MACRO_TEST id is handled, and only when the MACRO_TEST
 * debug define is enabled (see the top of this file); every other id
 * is silently ignored.  dead_args is the liveness information supplied
 * by the TCG core and is currently unused.
 *
 * NOTE(review): the original scraped span was interleaved with listing
 * line numbers; this is the reconstructed, compilable form of the same
 * function — no behavioral change intended.
 */
static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
{
    switch (macro_id) {
#ifdef MACRO_TEST
    case MACRO_TEST:
        /* test expansion: EAX := EAX / T0 via the divl helper */
        tcg_gen_helper_0_1(helper_divl_EAX_T0, cpu_T[0]);
        break;
#endif
    }
}
6821

    
6822
void optimize_flags_init(void)
6823
{
6824
#if TCG_TARGET_REG_BITS == 32
6825
    assert(sizeof(CCTable) == (1 << 3));
6826
#else
6827
    assert(sizeof(CCTable) == (1 << 4));
6828
#endif
6829
    tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
6830

    
6831
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
6832
#if TARGET_LONG_BITS > HOST_LONG_BITS
6833
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL, 
6834
                                  TCG_AREG0, offsetof(CPUState, t0), "T0");
6835
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
6836
                                  TCG_AREG0, offsetof(CPUState, t1), "T1");
6837
    cpu_A0 = tcg_global_mem_new(TCG_TYPE_TL,
6838
                                TCG_AREG0, offsetof(CPUState, t2), "A0");
6839
#else
6840
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
6841
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
6842
    cpu_A0 = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "A0");
6843
#endif
6844
    cpu_T3 = tcg_global_mem_new(TCG_TYPE_TL,
6845
                                TCG_AREG0, offsetof(CPUState, t3), "T3");
6846
#if defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS)
6847
    /* XXX: must be suppressed once there are less fixed registers */
6848
    cpu_tmp1_i64 = tcg_global_reg2_new_hack(TCG_TYPE_I64, TCG_AREG1, TCG_AREG2, "tmp1");
6849
#endif
6850
    cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
6851
                                   TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
6852
    cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
6853
                                    TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
6854
    cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
6855
                                    TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
6856
}
6857

    
6858
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
6859
   basic block 'tb'. If search_pc is TRUE, also generate PC
6860
   information for each intermediate instruction. */
6861
static inline int gen_intermediate_code_internal(CPUState *env,
6862
                                                 TranslationBlock *tb,
6863
                                                 int search_pc)
6864
{
6865
    DisasContext dc1, *dc = &dc1;
6866
    target_ulong pc_ptr;
6867
    uint16_t *gen_opc_end;
6868
    int j, lj, cflags;
6869
    uint64_t flags;
6870
    target_ulong pc_start;
6871
    target_ulong cs_base;
6872

    
6873
    /* generate intermediate code */
6874
    pc_start = tb->pc;
6875
    cs_base = tb->cs_base;
6876
    flags = tb->flags;
6877
    cflags = tb->cflags;
6878

    
6879
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
6880
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
6881
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
6882
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
6883
    dc->f_st = 0;
6884
    dc->vm86 = (flags >> VM_SHIFT) & 1;
6885
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
6886
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
6887
    dc->tf = (flags >> TF_SHIFT) & 1;
6888
    dc->singlestep_enabled = env->singlestep_enabled;
6889
    dc->cc_op = CC_OP_DYNAMIC;
6890
    dc->cs_base = cs_base;
6891
    dc->tb = tb;
6892
    dc->popl_esp_hack = 0;
6893
    /* select memory access functions */
6894
    dc->mem_index = 0;
6895
    if (flags & HF_SOFTMMU_MASK) {
6896
        if (dc->cpl == 3)
6897
            dc->mem_index = 2 * 4;
6898
        else
6899
            dc->mem_index = 1 * 4;
6900
    }
6901
    dc->cpuid_features = env->cpuid_features;
6902
    dc->cpuid_ext_features = env->cpuid_ext_features;
6903
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
6904
#ifdef TARGET_X86_64
6905
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
6906
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
6907
#endif
6908
    dc->flags = flags;
6909
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
6910
                    (flags & HF_INHIBIT_IRQ_MASK)
6911
#ifndef CONFIG_SOFTMMU
6912
                    || (flags & HF_SOFTMMU_MASK)
6913
#endif
6914
                    );
6915
#if 0
6916
    /* check addseg logic */
6917
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
6918
        printf("ERROR addseg\n");
6919
#endif
6920

    
6921
    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
6922
#if !(defined(__i386__) && (TARGET_LONG_BITS <= HOST_LONG_BITS))
6923
    cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
6924
#endif
6925
    cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
6926
    cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
6927
    cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
6928
    cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
6929
    cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
6930
    cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
6931
    cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
6932

    
6933
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
6934

    
6935
    dc->is_jmp = DISAS_NEXT;
6936
    pc_ptr = pc_start;
6937
    lj = -1;
6938

    
6939
    for(;;) {
6940
        if (env->nb_breakpoints > 0) {
6941
            for(j = 0; j < env->nb_breakpoints; j++) {
6942
                if (env->breakpoints[j] == pc_ptr) {
6943
                    gen_debug(dc, pc_ptr - dc->cs_base);
6944
                    break;
6945
                }
6946
            }
6947
        }
6948
        if (search_pc) {
6949
            j = gen_opc_ptr - gen_opc_buf;
6950
            if (lj < j) {
6951
                lj++;
6952
                while (lj < j)
6953
                    gen_opc_instr_start[lj++] = 0;
6954
            }
6955
            gen_opc_pc[lj] = pc_ptr;
6956
            gen_opc_cc_op[lj] = dc->cc_op;
6957
            gen_opc_instr_start[lj] = 1;
6958
        }
6959
        pc_ptr = disas_insn(dc, pc_ptr);
6960
        /* stop translation if indicated */
6961
        if (dc->is_jmp)
6962
            break;
6963
        /* if single step mode, we generate only one instruction and
6964
           generate an exception */
6965
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
6966
           the flag and abort the translation to give the irqs a
6967
           change to be happen */
6968
        if (dc->tf || dc->singlestep_enabled ||
6969
            (flags & HF_INHIBIT_IRQ_MASK) ||
6970
            (cflags & CF_SINGLE_INSN)) {
6971
            gen_jmp_im(pc_ptr - dc->cs_base);
6972
            gen_eob(dc);
6973
            break;
6974
        }
6975
        /* if too long translation, stop generation too */
6976
        if (gen_opc_ptr >= gen_opc_end ||
6977
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
6978
            gen_jmp_im(pc_ptr - dc->cs_base);
6979
            gen_eob(dc);
6980
            break;
6981
        }
6982
    }
6983
    *gen_opc_ptr = INDEX_op_end;
6984
    /* we don't forget to fill the last values */
6985
    if (search_pc) {
6986
        j = gen_opc_ptr - gen_opc_buf;
6987
        lj++;
6988
        while (lj <= j)
6989
            gen_opc_instr_start[lj++] = 0;
6990
    }
6991

    
6992
#ifdef DEBUG_DISAS
6993
    if (loglevel & CPU_LOG_TB_CPU) {
6994
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
6995
    }
6996
    if (loglevel & CPU_LOG_TB_IN_ASM) {
6997
        int disas_flags;
6998
        fprintf(logfile, "----------------\n");
6999
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
7000
#ifdef TARGET_X86_64
7001
        if (dc->code64)
7002
            disas_flags = 2;
7003
        else
7004
#endif
7005
            disas_flags = !dc->code32;
7006
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
7007
        fprintf(logfile, "\n");
7008
        if (loglevel & CPU_LOG_TB_OP_OPT) {
7009
            fprintf(logfile, "OP before opt:\n");
7010
            tcg_dump_ops(&tcg_ctx, logfile);
7011
            fprintf(logfile, "\n");
7012
        }
7013
    }
7014
#endif
7015

    
7016
    if (!search_pc)
7017
        tb->size = pc_ptr - pc_start;
7018
    return 0;
7019
}
7020

    
7021
/* Generate TCG intermediate code for translation block 'tb'.
 * Thin wrapper around gen_intermediate_code_internal() with
 * search_pc = 0: no per-opcode PC/cc_op tables are recorded.
 * Returns 0 (the internal generator's result is passed through).
 */
int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 0);
}
7025

    
7026
/* Generate intermediate code for 'tb' with search_pc = 1: in this
 * mode the generator also fills the per-opcode side tables
 * (gen_opc_pc[], gen_opc_cc_op[], gen_opc_instr_start[]) so that a
 * host PC inside the TB can later be mapped back to a guest EIP and
 * cc_op state (see gen_pc_load()).
 * Returns 0 (the internal generator's result is passed through).
 */
int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    return gen_intermediate_code_internal(env, tb, 1);
}
7030

    
7031
/* Restore the guest CPU state for the instruction located at position
 * 'pc_pos' in the per-opcode tables of translation block 'tb'
 * (the tables must have been filled by a search_pc translation pass).
 * Sets env->eip from the recorded linear PC, and env->cc_op when a
 * static condition-code op was recorded for that instruction.
 * 'searched_pc' and 'puc' are only used for debug logging here.
 */
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    int cc_op;
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP) {
        int i;
        fprintf(logfile, "RESTORE:\n");
        /* dump every recorded instruction start up to pc_pos */
        for(i = 0;i <= pc_pos; i++) {
            if (gen_opc_instr_start[i]) {
                fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
            }
        }
        fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                (uint32_t)tb->cs_base);
    }
#endif
    /* gen_opc_pc[] holds linear addresses; eip is relative to cs_base */
    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
    cc_op = gen_opc_cc_op[pc_pos];
    /* CC_OP_DYNAMIC means cc_op was not statically known at this
       point, so env->cc_op is left as-is */
    if (cc_op != CC_OP_DYNAMIC)
        env->cc_op = cc_op;
}